package subscriptions

import (
	"crypto/sha256"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"mime"
	urlPkg "net/url"
	"os"
	"path"
	"reflect"
	"strings"
	"sync"
	"time"

	"github.com/makeworld-the-better-one/amfora/client"
	"github.com/makeworld-the-better-one/amfora/config"
	"github.com/makeworld-the-better-one/go-gemini"
	"github.com/mmcdole/gofeed"
	"github.com/spf13/viper"
)

var (
	ErrSaving           = errors.New("couldn't save JSON to disk")
	ErrNotSuccess       = errors.New("status 20 not returned")
	ErrNotFeed          = errors.New("not a valid feed")
	ErrTooManyRedirects = errors.New("redirected more than 5 times")
)

var writeMu = sync.Mutex{} // Prevent concurrent writes to subscriptions.json file

// LastUpdated is the time when the in-memory data was last updated.
// It can be used to know if the subscriptions page should be regenerated.
var LastUpdated time.Time

// Init should be called after config.Init.
func Init() error {
	f, err := os.Open(config.SubscriptionPath)
	if err == nil {
		// File exists and could be opened

		fi, err := f.Stat()
		if err == nil && fi.Size() > 0 {
			// File is not empty

			jsonBytes, err := ioutil.ReadAll(f)
			f.Close()
			if err != nil {
				return fmt.Errorf("read subscriptions.json error: %w", err)
			}
			err = json.Unmarshal(jsonBytes, &data)
			if err != nil {
				return fmt.Errorf("subscriptions.json is corrupted: %w", err)
			}
		} else {
			// Empty or unreadable file; nothing was read from it
			f.Close()
		}
	} else if !os.IsNotExist(err) {
		// There's an error opening the file, but it's not because it doesn't exist
		return fmt.Errorf("open subscriptions.json error: %w", err)
	}

	if data.Feeds == nil {
		data.Feeds = make(map[string]*gofeed.Feed)
	}
	if data.Pages == nil {
		data.Pages = make(map[string]*pageJSON)
	}

	LastUpdated = time.Now()

	if viper.GetInt("subscriptions.update_interval") > 0 {
		// Update subscriptions every so often
		go func() {
			for {
				updateAll()
				time.Sleep(time.Duration(viper.GetInt("subscriptions.update_interval")) * time.Second)
			}
		}()
	} else {
		// User disabled automatic updates
		// So just update once at the beginning
		go updateAll()
	}

	return nil
}
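
// A minimal sketch of the expected startup order. The caller shown here is
// hypothetical; this package only requires that config.Init has already run
// so that config.SubscriptionPath is set:
//
//	if err := config.Init(); err != nil {
//		panic(err)
//	}
//	if err := subscriptions.Init(); err != nil {
//		// a corrupt or unreadable subscriptions.json lands here
//	}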

// IsSubscribed returns true if the URL is already subscribed to,
// whether a feed or page.
func IsSubscribed(url string) bool {
	data.feedMu.RLock()
	_, feed := data.Feeds[url]
	data.feedMu.RUnlock()
	if feed {
		return true
	}

	data.pageMu.RLock()
	_, page := data.Pages[url]
	data.pageMu.RUnlock()
	return page
}

// GetFeed returns a Feed object and a bool indicating whether the passed
// content was actually recognized as a feed.
func GetFeed(mediatype, filename string, r io.Reader) (*gofeed.Feed, bool) {
	if r == nil {
		return nil, false
	}

	// Check mediatype and filename
	// Both spellings of the JSON Feed media type are accepted: the official
	// registration is application/feed+json, but the reversed form also
	// appears in the wild.
	if mediatype != "application/atom+xml" && mediatype != "application/rss+xml" &&
		mediatype != "application/json+feed" && mediatype != "application/feed+json" &&
		filename != "atom.xml" && filename != "feed.xml" && filename != "feed.json" &&
		!strings.HasSuffix(filename, ".atom") && !strings.HasSuffix(filename, ".rss") &&
		!strings.HasSuffix(filename, ".xml") {
		// No part of the above is true
		return nil, false
	}
	feed, err := gofeed.NewParser().Parse(r)
	if feed == nil {
		return nil, false
	}
	return feed, err == nil
}
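
// Usage sketch (the media type, filename, and reader are illustrative):
//
//	feed, ok := GetFeed("application/atom+xml", "atom.xml", res.Body)
//	if !ok {
//		// content was not recognized as a valid feed
//	}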

// writeJSON saves the in-memory data to subscriptions.json on disk.
func writeJSON() error {
	writeMu.Lock()
	defer writeMu.Unlock()

	data.Lock()
	jsonBytes, err := json.MarshalIndent(&data, "", " ")
	data.Unlock()
	if err != nil {
		return err
	}

	return ioutil.WriteFile(config.SubscriptionPath, jsonBytes, 0666)
}
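
// Design note: writeMu serializes writes of the file itself, while data's
// lock is held only long enough to marshal a consistent snapshot, so a slow
// disk write never blocks readers of the in-memory data.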

// AddFeed stores a feed.
// It can be used to update a feed for a URL, although the package
// will handle that on its own.
func AddFeed(url string, feed *gofeed.Feed) error {
	if feed == nil {
		panic("feed is nil")
	}

	// Remove any unused fields to save memory and disk space
	feed.Image = nil
	feed.Generator = ""
	feed.Categories = nil
	feed.DublinCoreExt = nil
	feed.ITunesExt = nil
	feed.Custom = nil
	feed.Link = ""
	feed.Links = nil
	for _, item := range feed.Items {
		item.Description = ""
		item.Content = ""
		item.Image = nil
		item.Categories = nil
		item.Enclosures = nil
		item.DublinCoreExt = nil
		item.ITunesExt = nil
		item.Extensions = nil
		item.Custom = nil
		item.Link = "" // Links is used instead
	}

	data.feedMu.Lock()
	oldFeed, ok := data.Feeds[url]
	if !ok || !reflect.DeepEqual(feed, oldFeed) {
		// Feeds are different, or there was never an old one

		LastUpdated = time.Now()
		data.Feeds[url] = feed
		data.feedMu.Unlock()
		err := writeJSON()
		if err != nil {
			return ErrSaving
		}
	} else {
		data.feedMu.Unlock()
	}
	return nil
}
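
// Note: stripping the unused fields above also keeps the reflect.DeepEqual
// check cheap and meaningful, since only the retained fields are compared
// against the stored copy of the feed.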

// AddPage stores a page to track for changes.
// It can be used to update the page as well, although the package
// will handle that on its own.
func AddPage(url string, r io.Reader) error {
	if r == nil {
		return nil
	}

	h := sha256.New()
	if _, err := io.Copy(h, r); err != nil {
		return err
	}
	newHash := fmt.Sprintf("%x", h.Sum(nil))

	data.pageMu.Lock()
	_, ok := data.Pages[url]
	if !ok || data.Pages[url].Hash != newHash {
		// Page content is different, or it didn't exist

		LastUpdated = time.Now()
		data.Pages[url] = &pageJSON{
			Hash:    newHash,
			Changed: time.Now().UTC(),
		}

		data.pageMu.Unlock()
		err := writeJSON()
		if err != nil {
			return ErrSaving
		}
	} else {
		data.pageMu.Unlock()
	}

	return nil
}
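
// Usage sketch (the URL and content are illustrative): because only the
// SHA-256 hash of the body is stored, a page counts as updated when its
// content changes, not merely when it is re-fetched:
//
//	err := AddPage("gemini://example.com/page.gmi", strings.NewReader("# Hi"))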

// getResource returns a URL and Response for the given URL.
// It will follow up to 5 redirects, and if there is a permanent
// redirect it will return the new URL. Otherwise the URL will
// stay the same. The returned URL will never be empty.
//
// If there are more than 5 redirects the error will be ErrTooManyRedirects.
// ErrNotSuccess, as well as other fetch errors, will also be returned.
func getResource(url string) (string, *gemini.Response, error) {
	res, err := client.Fetch(url)
	if err != nil {
		if res != nil {
			res.Body.Close()
		}
		return url, nil, err
	}

	if res.Status == gemini.StatusSuccess {
		// No redirects
		return url, res, nil
	}

	parsed, err := urlPkg.Parse(url)
	if err != nil {
		return url, nil, err
	}

	i := 0
	redirs := make([]int, 0)
	urls := make([]*urlPkg.URL, 0)

	// Loop through redirects
	for (res.Status == gemini.StatusRedirectPermanent || res.Status == gemini.StatusRedirectTemporary) && i < 5 {
		redirs = append(redirs, res.Status)
		urls = append(urls, parsed)

		tmp, err := parsed.Parse(res.Meta)
		if err != nil {
			// Redirect URL returned by the server is invalid
			return url, nil, err
		}
		parsed = tmp

		// Make the new request
		// Assign to the outer res so the loop condition and the code below
		// see the latest response, not the first one
		res, err = client.Fetch(parsed.String())
		if err != nil {
			if res != nil {
				res.Body.Close()
			}
			return url, nil, err
		}

		i++
	}

	// Three possible outcomes here:
	// - Never redirected, got error on start
	// - No more redirects, other status code
	// - Too many redirects

	if i == 0 {
		// Never redirected or succeeded
		return url, res, ErrNotSuccess
	}

	if i < 5 {
		// The server stopped redirecting after <5 redirects

		if res.Status == gemini.StatusSuccess {
			// It ended by succeeding

			for j := range redirs {
				if redirs[j] == gemini.StatusRedirectTemporary {
					if j == 0 {
						// First redirect is temporary
						return url, res, nil
					}
					// There were permanent redirects before this one
					// Return the URL of the latest permanent redirect
					return urls[j-1].String(), res, nil
				}
			}
			// They were all permanent redirects
			return urls[len(urls)-1].String(), res, nil
		}

		// It stopped because there was a non-redirect, non-success response
		return url, res, ErrNotSuccess
	}

	// Too many redirects, return original
	// Close the body of the response that won't be returned
	if res != nil {
		res.Body.Close()
	}
	return url, nil, ErrTooManyRedirects
}
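
// Illustration (URLs are hypothetical): a chain of permanent redirects that
// ends in success returns the final URL, so callers can migrate a stored
// subscription:
//
//	newURL, res, err := getResource("gemini://old.example/feed.atom")
//	// After a permanent redirect to gemini://new.example/feed.atom,
//	// newURL is the new address. A temporary redirect first in the
//	// chain would leave newURL equal to the original URL.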

func updateFeed(url string) {
	newURL, res, err := getResource(url)
	if err != nil {
		return
	}

	mediatype, _, err := mime.ParseMediaType(res.Meta)
	if err != nil {
		return
	}
	filename := path.Base(newURL)
	feed, ok := GetFeed(mediatype, filename, res.Body)
	if !ok {
		return
	}

	err = AddFeed(newURL, feed)
	if url != newURL && err == nil {
		// URL has changed, remove old one
		Remove(url) //nolint:errcheck
	}
}

func updatePage(url string) {
	newURL, res, err := getResource(url)
	if err != nil {
		return
	}

	err = AddPage(newURL, res.Body)
	if url != newURL && err == nil {
		// URL has changed, remove old one
		Remove(url) //nolint:errcheck
	}
}

// updateAll updates all subscriptions using workers.
// It only returns once all the workers are done.
func updateAll() {
	worker := func(jobs <-chan [2]string, wg *sync.WaitGroup) {
		// Each job is: [2]string{<type>, "url"}
		// where <type> is "feed" or "page"

		defer wg.Done()
		for j := range jobs {
			if j[0] == "feed" {
				updateFeed(j[1])
			} else if j[0] == "page" {
				updatePage(j[1])
			}
		}
	}

	var wg sync.WaitGroup

	data.RLock()
	numJobs := len(data.Feeds) + len(data.Pages)
	jobs := make(chan [2]string, numJobs)

	if numJobs == 0 {
		data.RUnlock()
		return
	}

	numWorkers := viper.GetInt("subscriptions.workers")
	if numWorkers < 1 {
		numWorkers = 1
	}

	// Start workers, waiting for jobs
	for w := 0; w < numWorkers; w++ {
		wg.Add(1)
		go worker(jobs, &wg)
	}

	// Get map keys in a slice

	feedKeys := make([]string, len(data.Feeds))
	i := 0
	for k := range data.Feeds {
		feedKeys[i] = k
		i++
	}

	pageKeys := make([]string, len(data.Pages))
	i = 0
	for k := range data.Pages {
		pageKeys[i] = k
		i++
	}
	data.RUnlock()

	for j := 0; j < numJobs; j++ {
		if j < len(feedKeys) {
			jobs <- [2]string{"feed", feedKeys[j]}
		} else {
			// In the Pages
			jobs <- [2]string{"page", pageKeys[j-len(feedKeys)]}
		}
	}
	close(jobs)

	wg.Wait()
}
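
// The worker count and update interval come from the user's config. A sketch
// of the relevant keys (the key names match the viper lookups above; the
// values are examples only, not this package's defaults):
//
//	[subscriptions]
//	update_interval = 1800 # seconds; a value <= 0 means update once at startup
//	workers = 3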

// AllURLS returns all the subscribed-to URLs.
func AllURLS() []string {
	data.RLock()
	defer data.RUnlock()

	urls := make([]string, len(data.Feeds)+len(data.Pages))
	i := 0
	for k := range data.Feeds {
		urls[i] = k
		i++
	}
	for k := range data.Pages {
		urls[i] = k
		i++
	}

	return urls
}

// Remove removes a subscription from memory and from the disk.
// The URL must be provided. It will do nothing if the URL is
// not an actual subscription.
//
// It returns any errors that occurred when saving to disk.
func Remove(u string) error {
	data.Lock()
	// Just delete from both instead of using a loop to find it
	delete(data.Feeds, u)
	delete(data.Pages, u)
	data.Unlock()
	return writeJSON()
}