package feeds

import (
	"crypto/sha256"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"mime"
	urlPkg "net/url"
	"os"
	"path"
	"sort"
	"strings"
	"sync"
	"time"

	"github.com/makeworld-the-better-one/amfora/client"
	"github.com/makeworld-the-better-one/amfora/config"
	"github.com/makeworld-the-better-one/go-gemini"
	"github.com/mmcdole/gofeed"
)

// TODO: Test for deadlocks and whether there should be more
// goroutines for file writing or other things.

var (
	ErrSaving     = errors.New("couldn't save JSON to disk")
	ErrNotSuccess = errors.New("status 20 not returned")
	ErrNotFeed    = errors.New("not a valid feed")
)

var writeMu = sync.Mutex{}
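
// Note: the package-level data value used below (with its Feeds and Pages
// maps, the feedMu/pageMu mutexes, and the Lock/RLock helpers) is assumed to
// be defined elsewhere in this package.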

// Init should be called after config.Init.
func Init() error {
	defer config.FeedJson.Close()

	dec := json.NewDecoder(config.FeedJson)
	err := dec.Decode(&data)
	if err != nil && err != io.EOF {
		return fmt.Errorf("feeds json is corrupted: %v", err)
	}

	go updateAll()
	return nil
}
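
// A minimal startup sketch (hypothetical caller, panicking on errors for
// brevity):
//
//	if err := config.Init(); err != nil {
//		panic(err)
//	}
//	if err := feeds.Init(); err != nil {
//		panic(err)
//	}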

// IsTracked returns true if the feed/page URL is already being tracked.
func IsTracked(url string) bool {
	data.feedMu.RLock()
	for u := range data.Feeds {
		if url == u {
			data.feedMu.RUnlock()
			return true
		}
	}
	data.feedMu.RUnlock()

	data.pageMu.RLock()
	for u := range data.Pages {
		if url == u {
			data.pageMu.RUnlock()
			return true
		}
	}
	data.pageMu.RUnlock()
	return false
}

// GetFeed returns a Feed object and a bool indicating whether the passed
// content was actually recognized as a feed.
func GetFeed(mediatype, filename string, r io.Reader) (*gofeed.Feed, bool) {
	// Check mediatype and filename
	if mediatype != "application/atom+xml" && mediatype != "application/rss+xml" &&
		filename != "atom.xml" && filename != "feed.xml" &&
		!strings.HasSuffix(filename, ".atom") && !strings.HasSuffix(filename, ".rss") {
		// No part of the above is true
		return nil, false
	}
	feed, err := gofeed.NewParser().Parse(r)
	if feed == nil {
		return nil, false
	}
	return feed, err == nil
}
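
// A minimal usage sketch with a local file (hypothetical; inside this package
// GetFeed is fed from gemini responses, see updateFeed below):
//
//	f, err := os.Open("feed.xml")
//	if err == nil {
//		defer f.Close()
//		if feed, ok := GetFeed("application/atom+xml", "feed.xml", f); ok {
//			// feed is a parsed *gofeed.Feed
//		}
//	}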

// writeJson saves the in-memory feed and page data to the feeds JSON file.
// writeMu serializes concurrent callers.
func writeJson() error {
	writeMu.Lock()
	defer writeMu.Unlock()

	// O_TRUNC clears the file first, so stale bytes from a previously longer
	// document can't corrupt the JSON.
	f, err := os.OpenFile(config.FeedPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
	if err != nil {
		return err
	}
	defer f.Close()
	enc := json.NewEncoder(f)
	enc.SetEscapeHTML(false)
	enc.SetIndent("", " ")

	data.Lock()
	err = enc.Encode(&data)
	data.Unlock()

	return err
}

// AddFeed stores a feed.
// It can be used to update a feed for a URL, although the package
// will handle that on its own.
func AddFeed(url string, feed *gofeed.Feed) error {
	if feed == nil {
		panic("feed is nil")
	}

	// Remove any content to save memory and disk space
	for _, item := range feed.Items {
		item.Content = ""
	}

	data.feedMu.Lock()
	data.Feeds[url] = feed
	data.feedMu.Unlock()

	err := writeJson()
	if err != nil {
		// Don't keep the feed in memory if it couldn't be saved
		data.feedMu.Lock()
		delete(data.Feeds, url)
		data.feedMu.Unlock()
		return ErrSaving
	}
	return nil
}
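
// A subscription sketch (hypothetical caller; mediatype, filename, body, and
// url come from wherever the page was fetched):
//
//	if feed, ok := GetFeed(mediatype, filename, body); ok {
//		err = AddFeed(url, feed)
//	} else {
//		err = AddPage(url) // Not a feed, track the page itself (see below)
//	}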

// AddPage stores a page URL to track for changes.
// Do not use it to update a page, as it only resets the hash.
func AddPage(url string) error {
	data.pageMu.Lock()
	data.Pages[url] = &pageJson{} // No hash yet
	data.pageMu.Unlock()

	err := writeJson()
	if err != nil {
		// Don't keep the page in memory if it couldn't be saved
		data.pageMu.Lock()
		delete(data.Pages, url)
		data.pageMu.Unlock()
		return ErrSaving
	}
	return nil
}
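
// updateFeed fetches the feed URL over gemini and stores the parsed result,
// replacing any previously saved version.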
|
2020-08-16 17:42:45 -04:00
|
|
|
|
|
|
|
func updateFeed(url string) error {
|
|
|
|
res, err := client.Fetch(url)
|
|
|
|
if err != nil {
|
|
|
|
if res != nil {
|
|
|
|
res.Body.Close()
|
|
|
|
}
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer res.Body.Close()
|
|
|
|
|
|
|
|
if res.Status != gemini.StatusSuccess {
|
|
|
|
return ErrNotSuccess
|
|
|
|
}
|
|
|
|
mediatype, _, err := mime.ParseMediaType(res.Meta)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
filename := path.Base(url)
|
|
|
|
feed, ok := GetFeed(mediatype, filename, res.Body)
|
|
|
|
if !ok {
|
|
|
|
return ErrNotFeed
|
|
|
|
}
|
|
|
|
return AddFeed(url, feed)
|
|
|
|
}
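
// updatePage fetches the page URL over gemini, hashes the body with SHA-256,
// and records the time of change when the hash differs from the stored one.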
func updatePage(url string) error {
	res, err := client.Fetch(url)
	if err != nil {
		if res != nil {
			res.Body.Close()
		}
		return err
	}
	defer res.Body.Close()

	if res.Status != gemini.StatusSuccess {
		return ErrNotSuccess
	}
	h := sha256.New()
	if _, err := io.Copy(h, res.Body); err != nil {
		return err
	}
	newHash := fmt.Sprintf("%x", h.Sum(nil))

	data.pageMu.Lock()
	if data.Pages[url].Hash != newHash {
		// Page content is different
		data.Pages[url] = &pageJson{
			Hash:    newHash,
			Changed: time.Now().UTC(),
		}
	}
	data.pageMu.Unlock()

	err = writeJson()
	if err != nil {
		// Don't keep the page in memory if it couldn't be saved
		data.pageMu.Lock()
		delete(data.Pages, url)
		data.pageMu.Unlock()
		return err
	}

	return nil
}

// updateAll updates all feeds and pages.
func updateAll() {
	// TODO: Are two goroutines the right number?

	worker := func(jobs <-chan [2]string, wg *sync.WaitGroup) {
		// Each job is: [2]string{<type>, <url>}
		// where <type> is "feed" or "page"

		defer wg.Done()
		for j := range jobs {
			if j[0] == "feed" {
				updateFeed(j[1])
			}
			if j[0] == "page" {
				updatePage(j[1])
			}
		}
	}

	var wg sync.WaitGroup

	data.RLock()
	numJobs := len(data.Feeds) + len(data.Pages)
	jobs := make(chan [2]string, numJobs)

	// Start 2 workers, waiting for jobs
	for w := 0; w < 2; w++ {
		wg.Add(1)
		go worker(jobs, &wg)
	}

	// Get map keys in a slice

	feedKeys := make([]string, len(data.Feeds))
	i := 0
	for k := range data.Feeds {
		feedKeys[i] = k
		i++
	}

	pageKeys := make([]string, len(data.Pages))
	i = 0
	for k := range data.Pages {
		pageKeys[i] = k
		i++
	}
	data.RUnlock()

	for j := 0; j < numJobs; j++ {
		if j < len(feedKeys) {
			jobs <- [2]string{"feed", feedKeys[j]}
		} else {
			// In the Pages
			jobs <- [2]string{"page", pageKeys[j-len(feedKeys)]}
		}
	}
	// Close the channel so the workers' range loops end;
	// without this, wg.Wait() would deadlock.
	close(jobs)

	wg.Wait()
}

// GetPageEntries returns the current list of PageEntries
// for use in rendering a page.
// The returned entries are a snapshot and will not change;
// call this function again to get updates.
func GetPageEntries() *PageEntries {
	var pe PageEntries

	data.RLock()

	for _, feed := range data.Feeds {
		for _, item := range feed.Items {

			var pub time.Time

			// Prefer the updated time, then the published time, falling back
			// to now. The parsed times are pointers and may be nil.
			if item.UpdatedParsed != nil && !item.UpdatedParsed.IsZero() {
				pub = *item.UpdatedParsed
			} else if item.PublishedParsed != nil && !item.PublishedParsed.IsZero() {
				pub = *item.PublishedParsed
			} else {
				// No time on the post
				pub = time.Now()
			}

			// The feed author may be missing
			author := ""
			if feed.Author != nil {
				author = feed.Author.Name
			}

			pe.Entries = append(pe.Entries, &PageEntry{
				Author:    author,
				Title:     item.Title,
				URL:       item.Link,
				Published: pub,
			})
		}
	}

	for url, page := range data.Pages {
		parsed, _ := urlPkg.Parse(url)
		pe.Entries = append(pe.Entries, &PageEntry{
			Author:    parsed.Host,            // Domain is author
			Title:     path.Base(parsed.Path), // Filename is title
			URL:       url,
			Published: page.Changed,
		})
	}

	data.RUnlock()

	sort.Sort(&pe)
	return &pe
}
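
// A minimal rendering sketch (hypothetical caller, not part of this package):
//
//	pe := feeds.GetPageEntries()
//	for _, entry := range pe.Entries {
//		fmt.Printf("%s  %s\n%s\n\n", entry.Published.Format("2006-01-02"), entry.Title, entry.URL)
//	}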