Mirror of https://github.com/makew0rld/amfora.git

🚧 Fix duplicate entries in feed page

makeworld 2020-11-18 16:10:22 -05:00
parent cf5e65f75a
commit c72a225a43
3 changed files with 18 additions and 13 deletions
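The diff below moves `feedPageRaw` from a package-level variable into the `Feeds` function. A package-level string that is only ever appended to keeps the previous render's entries, which matches the "adds pages multiple times" note removed from the notes file. A minimal sketch of that failure mode and of the fix, with invented names and entries rather than the real Amfora code:

```go
package main

import "fmt"

// pageRaw is package-level, so it keeps growing across renders.
var pageRaw = "# Feeds & Pages\n"

// renderGlobal appends onto whatever the previous call produced,
// so every entry shows up once per render.
func renderGlobal(entries []string) string {
	for _, e := range entries {
		pageRaw += "=> " + e + "\n"
	}
	return pageRaw
}

// renderLocal rebuilds the page from scratch on every call,
// which is what making feedPageRaw local achieves.
func renderLocal(entries []string) string {
	pageRaw := "# Feeds & Pages\n" // local: starts empty each time
	for _, e := range entries {
		pageRaw += "=> " + e + "\n"
	}
	return pageRaw
}

func main() {
	entries := []string{"gemini://example.com/one", "gemini://example.com/two"}
	fmt.Println(renderGlobal(entries)) // 2 entries
	fmt.Println(renderGlobal(entries)) // 4 entries: duplicates
	fmt.Println(renderLocal(entries))  // always 2 entries
}
```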

View File

@@ -1,17 +1,14 @@
# Notes
## Temp
- Recalculating `about:feeds` adds pages multiple times to the view
- The only option for feed files is the download modal - there should be a feed modal before that one
- Auto feed detection fails on `ebc.li/atom.xml`
## Feeds (temp)
- Only options for "non-text" feed files is the download modal - there should be a feed modal before that one
- Auto feed detection fails on `ebc.li/atom.xml` - which is text
- TODO: remove all logger lines
## Issues
- URL for each tab should not be stored as a string - in the current code there's a lot of URL reparsing
## Regressions

View File

@@ -19,9 +19,6 @@ import (
"github.com/spf13/viper"
)
var feedPageRaw = "# Feeds & Pages\n\nUpdates" + strings.Repeat(" ", 80-25) + "[Newest -> Oldest]\n" +
strings.Repeat("-", 80) + "\nSee the help (by pressing ?) for details on how to use this page.\n\n"
var feedPageUpdated time.Time
// toLocalDay truncates the provided time to a date only,
@@ -38,11 +35,17 @@ func Feeds(t *tab) {
// Retrieve cached version if there hasn't been any updates
p, ok := cache.GetPage("about:feeds")
if feedPageUpdated.After(feeds.LastUpdated) && ok {
logger.Log.Println("using cached feeds page")
setPage(t, p)
t.applyBottomBar()
return
}
logger.Log.Println("started rendering feeds page")
feedPageRaw := "# Feeds & Pages\n\nUpdates" + strings.Repeat(" ", 80-25) + "[Newest -> Oldest]\n" +
strings.Repeat("-", 80) + "\nSee the help (by pressing ?) for details on how to use this page.\n\n"
// curDay represents what day of posts the loop is on.
// It only goes backwards in time.
// Its initial setting means:
@@ -62,7 +65,7 @@ func Feeds(t *tab) {
if pub.Before(curDay) {
// This post is on a new day, add a day header
curDay := pub
curDay = pub
feedPageRaw += fmt.Sprintf("\n## %s\n\n", curDay.Format("Jan 02, 2006"))
}
feedPageRaw += fmt.Sprintf("=>%s %s - %s\n", entry.URL, entry.Author, entry.Title)
@@ -82,6 +85,8 @@ func Feeds(t *tab) {
t.applyBottomBar()
feedPageUpdated = time.Now()
logger.Log.Println("done rendering feeds page")
}
// openFeedModal displays the "Add feed/page" modal
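The `feedPageUpdated = time.Now()` set at the end of the render pairs with the check at the top of `Feeds` (`feedPageUpdated.After(feeds.LastUpdated) && ok`) to decide when the cached `about:feeds` page can be reused. A self-contained sketch of that timestamp-based invalidation, with simplified names rather than the real Amfora API:

```go
package main

import (
	"fmt"
	"time"
)

var (
	dataLastUpdated time.Time // bumped whenever the underlying data changes
	pageRenderedAt  time.Time // when the cached page was last built
	cachedPage      string
)

// renderPage rebuilds the page only if the data changed after the last render.
func renderPage() string {
	if pageRenderedAt.After(dataLastUpdated) && cachedPage != "" {
		return cachedPage // nothing new since the last render
	}
	cachedPage = "page built at " + time.Now().Format(time.RFC3339Nano)
	pageRenderedAt = time.Now()
	return cachedPage
}

func main() {
	fmt.Println(renderPage()) // builds the page
	fmt.Println(renderPage()) // served from cache
	dataLastUpdated = time.Now()
	fmt.Println(renderPage()) // data changed, so it rebuilds
}
```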
@@ -159,7 +164,7 @@ func addFeedDirect(u string, feed *gofeed.Feed, tracked bool) {
}
}
// addFeed goes through the process of adding a bookmark for the current page.
// addFeed goes through the process of tracking the current page/feed.
// It is the high-level way of doing it. It should be called in a goroutine.
func addFeed() {
logger.Log.Println("display.addFeed called")

View File

@@ -42,10 +42,12 @@ var LastUpdated time.Time
func Init() error {
f, err := os.Open(config.FeedPath)
if err == nil {
// File exists and could be opened
defer f.Close()
fi, err := f.Stat()
if err == nil && fi.Size() > 0 {
// File is not empty
dec := json.NewDecoder(f)
err = dec.Decode(&data)
if err != nil && err != io.EOF {
@@ -156,13 +158,13 @@ func AddFeed(url string, feed *gofeed.Feed) error {
if !ok || !reflect.DeepEqual(feed, oldFeed) {
// Feeds are different, or there was never an old one
LastUpdated = time.Now()
data.Feeds[url] = feed
data.feedMu.Unlock()
err := writeJSON()
if err != nil {
return ErrSaving
}
LastUpdated = time.Now()
} else {
data.feedMu.Unlock()
}
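The reordering above bumps `LastUpdated` and stores the new feed while the mutex is still held, then releases the lock before the JSON write. A trimmed-down sketch of that update pattern; the names and the `persist` helper are simplified stand-ins, not the real feeds package:

```go
package main

import (
	"fmt"
	"reflect"
	"sync"
	"time"
)

var (
	mu          sync.Mutex
	feeds       = map[string][]string{}
	lastUpdated time.Time
)

func addFeed(url string, entries []string) error {
	mu.Lock()
	old, ok := feeds[url]
	if !ok || !reflect.DeepEqual(old, entries) {
		// Feed is new or changed: bump the timestamp and update memory
		// before unlocking, so readers see a consistent state.
		lastUpdated = time.Now()
		feeds[url] = entries
		mu.Unlock()
		return persist() // the slow disk write happens outside the lock
	}
	mu.Unlock()
	return nil
}

// persist stands in for writing the JSON file to disk.
func persist() error { return nil }

func main() {
	_ = addFeed("gemini://example.com/feed", []string{"first post"})
	fmt.Println(!lastUpdated.IsZero()) // true: the change was recorded
}
```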
@@ -189,6 +191,8 @@ func AddPage(url string, r io.Reader) error {
_, ok := data.Pages[url]
if !ok || data.Pages[url].Hash != newHash {
// Page content is different, or it didn't exist
LastUpdated = time.Now()
data.Pages[url] = &pageJSON{
Hash: newHash,
Changed: time.Now().UTC(),
@@ -199,7 +203,6 @@ func AddPage(url string, r io.Reader) error {
if err != nil {
return ErrSaving
}
LastUpdated = time.Now()
} else {
data.pageMu.Unlock()
}
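`AddPage` applies the same idea to whole pages, keyed on a content hash: only when `newHash` differs from the stored hash does it record a change (and, with this commit, bump `LastUpdated` up front). A sketch of that hash-based change detection; SHA-256 and the struct below are assumptions for illustration, since this hunk doesn't show how `newHash` is computed:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"strings"
	"time"
)

type trackedPage struct {
	Hash    string
	Changed time.Time
}

var pages = map[string]trackedPage{}

// hashOf hashes the page body read from r.
func hashOf(r io.Reader) (string, error) {
	h := sha256.New()
	if _, err := io.Copy(h, r); err != nil {
		return "", err
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}

// addPage records an update only when the content hash changes.
func addPage(url string, r io.Reader) error {
	newHash, err := hashOf(r)
	if err != nil {
		return err
	}
	p, ok := pages[url]
	if !ok || p.Hash != newHash {
		pages[url] = trackedPage{Hash: newHash, Changed: time.Now().UTC()}
	}
	return nil
}

func main() {
	_ = addPage("gemini://example.com/", strings.NewReader("hello"))
	_ = addPage("gemini://example.com/", strings.NewReader("hello"))   // unchanged: no update
	_ = addPage("gemini://example.com/", strings.NewReader("changed")) // changed: recorded
	fmt.Println(pages["gemini://example.com/"].Changed.IsZero()) // false
}
```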