Mirror of https://github.com/makew0rld/amfora.git, synced 2025-02-02 15:07:34 -05:00

Commit 5c84991940 (parent 2357e25b07)
🚧 Feeds page rendering fixed, still untested
Changed files:
cache/favicons.go → cache/favicon.go (vendored, 0 changes)
cache/cache.go → cache/page.go (vendored, 3 changes)
cache/cache.go → cache/page.go (vendored)

@@ -1,6 +1,5 @@
 // Package cache provides an interface for a cache of strings, aka text/gemini pages, and redirects.
 // It is fully thread safe.
-// The redirect cache is not limited.
 package cache
 
 import (
@@ -22,7 +21,7 @@ func SetMaxPages(max int) {
 	maxPages = max
 }
 
-// SetMaxSize sets the max size the cache can be, in bytes.
+// SetMaxSize sets the max size the page cache can be, in bytes.
 // A value <= 0 means infinite size.
 func SetMaxSize(max int) {
 	maxSize = max
@@ -5,6 +5,7 @@ import (
 	"strings"
 	"time"
 
+	"github.com/makeworld-the-better-one/amfora/cache"
 	"github.com/makeworld-the-better-one/amfora/feeds"
 	"github.com/makeworld-the-better-one/amfora/renderer"
 	"github.com/makeworld-the-better-one/amfora/structs"
@@ -15,19 +16,33 @@ var feedPageRaw = "# Feeds & Pages\n\nUpdates" + strings.Repeat(" ", 80-25) + "[
 
 var timeDay = 24 * time.Hour
 
+var feedPageUpdated time.Time
+
 // Feeds displays the feeds page on the current tab.
 func Feeds(t *tab) {
-	// TODO; Decide about date in local time vs UTC
-	// TODO: Cache
+	// Retrieve cached version if there haven't been updates
+	p, ok := cache.GetPage("about:feeds")
+	if feedPageUpdated == feeds.LastUpdated && ok {
+		setPage(t, p)
+		t.applyBottomBar()
+		return
+	}
 
 	pe := feeds.GetPageEntries()
 
-	curDay := time.Time{}.Round(timeDay)
+	// curDay represents what day of posts the loop is on.
+	// It only goes backwards in time.
+	// Its initial setting means:
+	// only display posts older than a day in the future.
+	curDay := time.Now().Round(timeDay).Add(timeDay)
 
-	for _, entry := range pe.Entries {
-		if entry.Published.Round(timeDay).After(curDay) {
+	for _, entry := range pe.Entries { // From new to old
+		// Convert to local time, remove sub-day info
+		pub := entry.Published.In(time.Local).Round(timeDay)
+
+		if pub.Before(curDay) {
 			// This post is on a new day, add a day header
-			curDay := entry.Published.Round(timeDay)
+			curDay = pub
 			feedPageRaw += fmt.Sprintf("\n## %s\n\n", curDay.Format("Jan 02, 2006"))
 		}
 		feedPageRaw += fmt.Sprintf("=>%s %s - %s\n", entry.URL, entry.Author, entry.Title)
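The new loop groups posts under day headers by rounding each timestamp to day granularity and emitting a heading whenever it crosses into an earlier day. A minimal standalone sketch of that grouping, with hypothetical post times:

package main

import (
	"fmt"
	"time"
)

func main() {
	day := 24 * time.Hour
	now := time.Now()
	// Hypothetical post times, already sorted newest to oldest.
	posts := []time.Time{now, now.Add(-2 * time.Hour), now.Add(-30 * time.Hour)}

	// Start one day in the future so the first post always gets a header.
	curDay := now.Round(day).Add(day)
	for _, p := range posts {
		pub := p.In(time.Local).Round(day) // drop sub-day info
		if pub.Before(curDay) {
			// Crossed into an earlier day: emit a header once.
			curDay = pub
			fmt.Printf("## %s\n", curDay.Format("Jan 02, 2006"))
		}
		fmt.Println("=> post link")
	}
}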
@@ -42,6 +57,13 @@ func Feeds(t *tab) {
 		Width: termW,
 		Mediatype: structs.TextGemini,
 	}
+	cache.AddPage(&page)
 	setPage(t, &page)
 	t.applyBottomBar()
+
+	feedPageUpdated = time.Now()
+}
+
+func feedInit() {
+	// TODO
 }
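Feeds now renders lazily: the generated page is stored in the cache under about:feeds and reused as long as feedPageUpdated still matches feeds.LastUpdated. A minimal sketch of that invalidation pattern, using hypothetical names (dataUpdated, renderedAt, page):

package main

import (
	"fmt"
	"time"
)

var (
	dataUpdated time.Time // when the underlying feed data last changed
	renderedAt  time.Time // data timestamp captured at last render
	cached      string
)

// page re-renders only when the data has changed since the last render.
func page() string {
	if renderedAt.Equal(dataUpdated) && cached != "" {
		return cached // cache hit: nothing changed
	}
	cached = "rendered for data from " + dataUpdated.Format(time.RFC3339)
	renderedAt = dataUpdated // record which data version this render reflects
	return cached
}

func main() {
	dataUpdated = time.Now()
	fmt.Println(page()) // renders
	fmt.Println(page()) // reuses cache
	dataUpdated = time.Now()
	fmt.Println(page()) // re-renders
}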
@@ -150,6 +150,7 @@ func modalInit() {
 
 	bkmkInit()
 	dlInit()
+	feedInit()
 }
 
 // Error displays an error on the screen in a modal.
@@ -30,7 +30,11 @@ var (
 	ErrNotFeed = errors.New("not a valid feed")
 )
 
-var writeMu = sync.Mutex{}
+var writeMu = sync.Mutex{} // Prevent concurrent writes to feeds.json file
 
+// LastUpdated is the time when the in-memory data was last updated.
+// It can be used to know if the feed page should be regenerated.
+var LastUpdated time.Time
+
 // Init should be called after config.Init.
 func Init() error {
@@ -42,6 +46,8 @@ func Init() error {
 		return fmt.Errorf("feeds json is corrupted: %v", err) //nolint:goerr113
 	}
 
+	LastUpdated = time.Now()
+
 	go updateAll()
 	return nil
 }
@@ -70,6 +76,10 @@ func IsTracked(url string) bool {
 // GetFeed returns a Feed object and a bool indicating whether the passed
 // content was actually recognized as a feed.
 func GetFeed(mediatype, filename string, r io.Reader) (*gofeed.Feed, bool) {
+	if r == nil {
+		return nil, false
+	}
+
 	// Check mediatype and filename
 	if mediatype != "application/atom+xml" && mediatype != "application/rss+xml" &&
 		filename != "atom.xml" && filename != "feed.xml" &&
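The nil-reader guard keeps GetFeed from handing a nil body to the parser; content that passes the mediatype/filename check is then parsed with gofeed. A minimal sketch of that parse step (assuming the github.com/mmcdole/gofeed module is available):

package main

import (
	"fmt"
	"strings"

	"github.com/mmcdole/gofeed"
)

func main() {
	body := `<rss version="2.0"><channel><title>Example</title></channel></rss>`
	feed, err := gofeed.NewParser().Parse(strings.NewReader(body))
	if err != nil {
		fmt.Println("not recognized as a feed:", err)
		return
	}
	fmt.Println("feed title:", feed.Title) // "Example"
}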
@@ -119,34 +129,53 @@ func AddFeed(url string, feed *gofeed.Feed) error {
 
 	data.feedMu.Lock()
 	data.Feeds[url] = feed
-	data.feedMu.Unlock()
 
 	err := writeJSON()
 	if err != nil {
 		// Don't use in-memory if it couldn't be saved
-		data.feedMu.Lock()
 		delete(data.Feeds, url)
 		data.feedMu.Unlock()
 		return ErrSaving
 	}
+	data.feedMu.Unlock()
+
+	LastUpdated = time.Now()
 	return nil
 }
 
-// AddPage stores a page URL to track for changes.
-// Do not use it to update a page, as it only resets the hash.
-func AddPage(url string) error {
+// AddPage stores a page to track for changes.
+// It can be used to update the page as well, although the package
+// will handle that on its own.
+func AddPage(url string, r io.Reader) error {
+	if r == nil {
+		return nil
+	}
+
+	h := sha256.New()
+	if _, err := io.Copy(h, r); err != nil {
+		return err
+	}
+	newHash := fmt.Sprintf("%x", h.Sum(nil))
+
 	data.pageMu.Lock()
-	data.Pages[url] = &pageJSON{} // No hash yet
-	data.pageMu.Unlock()
+	_, ok := data.Pages[url]
+	if !ok || data.Pages[url].Hash != newHash {
+		// Page content is different, or it didn't exist
+		data.Pages[url] = &pageJSON{
+			Hash:    newHash,
+			Changed: time.Now().UTC(),
+		}
+	}
+
 	err := writeJSON()
 	if err != nil {
 		// Don't use in-memory if it couldn't be saved
-		data.pageMu.Lock()
 		delete(data.Pages, url)
 		data.pageMu.Unlock()
-		return ErrSaving
+		return err
 	}
+	data.pageMu.Unlock()
+
+	LastUpdated = time.Now()
 	return nil
 }
 
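AddPage now derives a SHA-256 hash from the page body and stores a new pageJSON entry only when the hash differs, so unchanged pages keep their original Changed timestamp. A minimal sketch of that comparison:

package main

import (
	"crypto/sha256"
	"fmt"
	"io"
	"strings"
)

// hashOf returns the hex SHA-256 digest of everything read from r.
func hashOf(r io.Reader) (string, error) {
	h := sha256.New()
	if _, err := io.Copy(h, r); err != nil {
		return "", err
	}
	return fmt.Sprintf("%x", h.Sum(nil)), nil
}

func main() {
	stored, _ := hashOf(strings.NewReader("# Page v1"))
	current, _ := hashOf(strings.NewReader("# Page v2"))
	fmt.Println("page changed:", stored != current) // true
}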
@@ -188,32 +217,8 @@ func updatePage(url string) error {
 	if res.Status != gemini.StatusSuccess {
 		return ErrNotSuccess
 	}
-	h := sha256.New()
-	if _, err := io.Copy(h, res.Body); err != nil {
-		return err
-	}
-	newHash := fmt.Sprintf("%x", h.Sum(nil))
-
-	data.pageMu.Lock()
-	if data.Pages[url].Hash != newHash {
-		// Page content is different
-		data.Pages[url] = &pageJSON{
-			Hash:    newHash,
-			Changed: time.Now().UTC(),
-		}
-	}
-	data.pageMu.Unlock()
-
-	err = writeJSON()
-	if err != nil {
-		// Don't use in-memory if it couldn't be saved
-		data.pageMu.Lock()
-		delete(data.Pages, url)
-		data.pageMu.Unlock()
-		return err
-	}
-
-	return nil
+	return AddPage(url, res.Body)
 }
 
 // updateAll updates all feeds and pages.
@@ -221,15 +226,14 @@ func updateAll() {
 	// TODO: Is two goroutines the right amount?
 
 	worker := func(jobs <-chan [2]string, wg *sync.WaitGroup) {
-		// Each job is: []string{<type>, "url"}
+		// Each job is: [2]string{<type>, "url"}
 		// where <type> is "feed" or "page"
 
 		defer wg.Done()
 		for j := range jobs {
 			if j[0] == "feed" {
 				updateFeed(j[1]) //nolint:errcheck
-			}
-			if j[0] == "page" {
+			} else if j[0] == "page" {
 				updatePage(j[1]) //nolint:errcheck
 			}
 		}
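updateAll fans jobs out to two goroutines over a channel of [2]string values, where the first element selects feed vs page handling. A minimal sketch of that worker pool, with hypothetical job URLs:

package main

import (
	"fmt"
	"sync"
)

func main() {
	jobs := make(chan [2]string)
	var wg sync.WaitGroup

	worker := func() {
		defer wg.Done()
		for j := range jobs {
			// j[0] is "feed" or "page"; j[1] is the URL to update.
			fmt.Printf("updating %s: %s\n", j[0], j[1])
		}
	}

	wg.Add(2)
	go worker()
	go worker()

	jobs <- [2]string{"feed", "gemini://example.com/atom.xml"}
	jobs <- [2]string{"page", "gemini://example.com/index.gmi"}
	close(jobs)
	wg.Wait()
}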
@@ -278,8 +282,9 @@ func updateAll() {
 
 // GetPageEntries returns the current list of PageEntries
 // for use in rendering a page.
-// The contents of the entries will never change, and this
-// function should be called again to get updates.
+// The contents of the returned entries will never change,
+// so this function needs to be called again to get updates.
+// It always returns sorted entries - by post time, from newest to oldest.
 func GetPageEntries() *PageEntries {
 	var pe PageEntries
 
@@ -290,6 +295,8 @@ func GetPageEntries() *PageEntries {
 
 		var pub time.Time
 
+		// Try to use updated time first, then published
+
 		if !item.UpdatedParsed.IsZero() {
 			pub = *item.UpdatedParsed
 		} else if !item.PublishedParsed.IsZero() {
@@ -321,5 +328,6 @@ func GetPageEntries() *PageEntries {
 	data.RUnlock()
 
 	sort.Sort(&pe)
+
 	return &pe
 }
@@ -81,6 +81,7 @@ type PageEntry struct {
 
 // PageEntries is new-to-old list of Entry structs, used to create a feed page.
 // It should always be assumed to be sorted when used in other packages.
+// Sorted by post time, from newest to oldest.
 type PageEntries struct {
 	Entries []*PageEntry
 }
@@ -92,7 +93,7 @@ func (e *PageEntries) Len() int {
 }
 
 func (e *PageEntries) Less(i, j int) bool {
-	return e.Entries[i].Published.Before(e.Entries[j].Published)
+	return e.Entries[i].Published.After(e.Entries[j].Published)
 }
 
 func (e *PageEntries) Swap(i, j int) {
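Flipping Before to After in Less reverses sort.Sort's ascending order, so PageEntries comes back newest-first. A minimal sketch with a hypothetical entry type:

package main

import (
	"fmt"
	"sort"
	"time"
)

type entry struct{ published time.Time }

type byNewest []entry

func (e byNewest) Len() int           { return len(e) }
func (e byNewest) Less(i, j int) bool { return e[i].published.After(e[j].published) }
func (e byNewest) Swap(i, j int)      { e[i], e[j] = e[j], e[i] }

func main() {
	now := time.Now()
	entries := byNewest{
		{now.Add(-48 * time.Hour)},
		{now},
		{now.Add(-24 * time.Hour)},
	}
	sort.Sort(entries)
	for _, e := range entries {
		fmt.Println(e.published.Format("Jan 02")) // newest to oldest
	}
}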