Mirror of https://github.com/makew0rld/amfora.git, synced 2025-02-02 15:07:34 -05:00

🔀 Merge branch 'feeds' - subscriptions feature

See #61 for details on this feature.

Commit 6d8e823e51
.golangci.yml

@@ -15,7 +15,6 @@ linters:
- dupl
- exhaustive
- exportloopref
- goconst
- gocritic
- goerr113
- gofmt
CHANGELOG.md

@@ -6,6 +6,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]
### Added
- **Subscriptions** to feeds and page changes (#61)
- Opening local files with `file://` URIs (#103, #117)
- `show_link` option added in config to optionally see the URL (#133)
Makefile (4 changed lines)

@@ -21,8 +21,8 @@ clean:

.PHONY: install
install: amfora amfora.desktop
	install -Dm 755 amfora $(PREFIX)/bin/amfora
	install -Dm 644 amfora.desktop $(PREFIX)/share/applications/amfora.desktop
	install -m 755 amfora $(PREFIX)/bin/amfora
	install -m 644 amfora.desktop $(PREFIX)/share/applications/amfora.desktop

.PHONY: uninstall
uninstall:
NOTES.md (6 changed lines)

@@ -3,10 +3,6 @@
## Issues
- URL for each tab should not be stored as a string - in the current code there's lots of reparsing the URL


## Regressions


## Upstream Bugs
- Wrapping messes up on brackets
  - Filed [issue 23](https://gitlab.com/tslocum/cview/-/issues/23)

@@ -21,4 +17,4 @@
- Bookmark keys aren't deleted, just set to `""`
  - Waiting on [this viper PR](https://github.com/spf13/viper/pull/519) to be merged
- Help table cells aren't dynamically wrapped
  - Filed [issue 29](https://gitlab.com/tslocum/cview/-/issues/29)
  - Filed [issue 29](https://gitlab.com/tslocum/cview/-/issues/29)
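As an editorial aside on the first Issues item above: one way to avoid reparsing the tab URL everywhere is to keep the parsed form next to the string. A minimal sketch of that idea, using hypothetical names rather than Amfora's actual tab struct:

package display

import "net/url"

// tabURL keeps both the raw string and its parsed form so callers
// never have to call url.Parse on the same URL twice.
type tabURL struct {
	raw    string
	parsed *url.URL
}

// set parses once and stores both representations.
func (t *tabURL) set(u string) error {
	parsed, err := url.Parse(u)
	if err != nil {
		return err
	}
	t.raw = u
	t.parsed = parsed
	return nil
}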
README.md

@@ -122,9 +122,9 @@ Features in *italics* are in the master branch, but not in the latest release.
  - Manage and browse them
  - Similar to [Kristall](https://github.com/MasterQ32/kristall)
  - https://lists.orbitalfox.eu/archives/gemini/2020/001400.html
- [ ] Subscribe to RSS and Atom feeds and display them
  - Subscribing to page changes, similar to how Spacewalk works, will also be supported
  - *In progress on `feeds` branch*
- [x] *Subscriptions*
  - Subscribing to RSS, Atom, and [JSON Feeds](https://jsonfeed.org/) is supported
  - So is subscribing to a page, to know when it changes
- [ ] Stream support
- [ ] Table of contents for pages
- [ ] Search in pages with <kbd>Ctrl-F</kbd>

@@ -151,8 +151,9 @@ Amfora ❤️ open source!
- It uses [tcell](https://github.com/gdamore/tcell) for low level terminal operations
- [Viper](https://github.com/spf13/viper) for configuration and TOFU storing
- [go-gemini](https://github.com/makeworld-the-better-one/go-gemini), my forked and updated Gemini client/server library
- My [progressbar fork](https://github.com/makeworld-the-better-one/progressbar)
- My [progressbar fork](https://github.com/makeworld-the-better-one/progressbar) - pull request [here](https://github.com/schollz/progressbar/pull/69)
- [go-humanize](https://github.com/dustin/go-humanize)
- My [gofeed fork](https://github.com/makeworld-the-better-one/gofeed) - pull request [here](https://github.com/mmcdole/gofeed/pull/164)

## License
This project is licensed under the GPL v3.0. See the [LICENSE](./LICENSE) file for details.
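On the page-change subscriptions mentioned in the README entry above: a common way to detect that a subscribed page changed, shown here only as an illustrative sketch and not necessarily how Amfora's subscriptions package does it, is to hash the fetched body and compare it with the hash stored from the last check:

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// pageChanged reports whether body differs from the previously stored
// hash, and returns the new hash so the caller can store it.
func pageChanged(body []byte, oldHash string) (bool, string) {
	sum := sha256.Sum256(body)
	newHash := hex.EncodeToString(sum[:])
	return newHash != oldHash, newHash
}

func main() {
	changed, h := pageChanged([]byte("# My gemlog\n"), "")
	fmt.Println(changed, h)
}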
@@ -7,6 +7,7 @@ import (
	"github.com/makeworld-the-better-one/amfora/client"
	"github.com/makeworld-the-better-one/amfora/config"
	"github.com/makeworld-the-better-one/amfora/display"
	"github.com/makeworld-the-better-one/amfora/subscriptions"
)

var (

@@ -40,7 +41,12 @@ func main() {

	err := config.Init()
	if err != nil {
		fmt.Printf("Config error: %v\n", err)
		fmt.Fprintf(os.Stderr, "Config error: %v\n", err)
		os.Exit(1)
	}
	err = subscriptions.Init()
	if err != nil {
		fmt.Fprintf(os.Stderr, "subscriptions.json error: %v\n", err)
		os.Exit(1)
	}
cache/favicons.go → cache/favicon.go (vendored, 0 changed lines)
cache/cache.go → cache/page.go (vendored, 6 changed lines)
@@ -1,10 +1,8 @@
// Package cache provides an interface for a cache of strings, aka text/gemini pages, and redirects.
// It is fully thread safe.
// The redirect cache is not limited.
package cache

import (
	"strings"
	"sync"

	"github.com/makeworld-the-better-one/amfora/structs"

@@ -22,7 +20,7 @@ func SetMaxPages(max int) {
	maxPages = max
}

// SetMaxSize sets the max size the cache can be, in bytes.
// SetMaxSize sets the max size the page cache can be, in bytes.
// A value <= 0 means infinite size.
func SetMaxSize(max int) {
	maxSize = max

@@ -48,7 +46,7 @@ func removeURL(url string) {
// If your page is larger than the max cache size, the provided page
// will silently not be added to the cache.
func AddPage(p *structs.Page) {
	if p.URL == "" || strings.HasPrefix(p.URL, "about:") {
	if p.URL == "" {
		// Just in case, these pages shouldn't be cached
		return
	}
@@ -5,6 +5,7 @@ import (
	"io/ioutil"
	"net"
	"net/url"
	"sync"
	"time"

	"github.com/makeworld-the-better-one/go-gemini"

@@ -14,6 +15,8 @@ import (

var (
	certCache   = make(map[string][][]byte)
	certCacheMu = &sync.RWMutex{}

	fetchClient *gemini.Client
	dlClient    *gemini.Client // For downloading
)

@@ -30,8 +33,11 @@ func Init() {
}

func clientCert(host string) ([]byte, []byte) {
	if cert := certCache[host]; cert != nil {
		return cert[0], cert[1]
	certCacheMu.RLock()
	pair, ok := certCache[host]
	certCacheMu.RUnlock()
	if ok {
		return pair[0], pair[1]
	}

	// Expand paths starting with ~/

@@ -44,22 +50,30 @@ func clientCert(host string) ([]byte, []byte) {
		keyPath = viper.GetString("auth.keys." + host)
	}
	if certPath == "" && keyPath == "" {
		certCacheMu.Lock()
		certCache[host] = [][]byte{nil, nil}
		certCacheMu.Unlock()
		return nil, nil
	}

	cert, err := ioutil.ReadFile(certPath)
	if err != nil {
		certCacheMu.Lock()
		certCache[host] = [][]byte{nil, nil}
		certCacheMu.Unlock()
		return nil, nil
	}
	key, err := ioutil.ReadFile(keyPath)
	if err != nil {
		certCacheMu.Lock()
		certCache[host] = [][]byte{nil, nil}
		certCacheMu.Unlock()
		return nil, nil
	}

	certCacheMu.Lock()
	certCache[host] = [][]byte{cert, key}
	certCacheMu.Unlock()
	return cert, key
}
@@ -39,6 +39,10 @@ var bkmkPath string

var DownloadsDir string

// Subscriptions
var subscriptionDir string
var SubscriptionPath string

// Command for opening HTTP(S) URLs in the browser, from "a-general.http" in config.
var HTTPCommand []string

@@ -96,6 +100,22 @@ func Init() error {
	}
	bkmkPath = filepath.Join(bkmkDir, "bookmarks.toml")

	// Feeds dir and path
	if runtime.GOOS == "windows" {
		// In APPDATA beside other Amfora files
		subscriptionDir = amforaAppData
	} else {
		// XDG data dir on POSIX systems
		xdg_data, ok := os.LookupEnv("XDG_DATA_HOME")
		if ok && strings.TrimSpace(xdg_data) != "" {
			subscriptionDir = filepath.Join(xdg_data, "amfora")
		} else {
			// Default to ~/.local/share/amfora
			subscriptionDir = filepath.Join(home, ".local", "share", "amfora")
		}
	}
	SubscriptionPath = filepath.Join(subscriptionDir, "subscriptions.json")

	// *** Create necessary files and folders ***

	// Config

@@ -131,6 +151,11 @@ func Init() error {
	if err == nil {
		f.Close()
	}
	// Feeds
	err = os.MkdirAll(subscriptionDir, 0755)
	if err != nil {
		return err
	}

	// *** Downloads paths, setup, and creation ***

@@ -210,6 +235,10 @@ func Init() error {
	viper.SetDefault("url-handlers.other", "off")
	viper.SetDefault("cache.max_size", 0)
	viper.SetDefault("cache.max_pages", 20)
	viper.SetDefault("subscriptions.popup", true)
	viper.SetDefault("subscriptions.update_interval", 1800)
	viper.SetDefault("subscriptions.workers", 3)
	viper.SetDefault("subscriptions.entries_per_page", 20)

	viper.SetConfigFile(configPath)
	viper.SetConfigType("toml")
@@ -138,6 +138,28 @@ max_pages = 30 # The maximum number of pages the cache will store
# Note that HTTP and HTTPS are treated as separate protocols here.


[subscriptions]
# For tracking feeds and pages

# Whether a pop-up appears when viewing a potential feed
popup = true

# How often to check for updates to subscriptions in the background, in seconds.
# Set it to 0 to disable this feature. You can still update individual feeds
# manually, or restart the browser.
#
# Note Amfora will check for updates on browser start no matter what this setting is.
update_interval = 1800 # 30 mins

# How many subscriptions can be checked at the same time when updating.
# If you have many subscriptions you may want to increase this for faster
# update times. Any value below 1 will be corrected to 1.
workers = 3

# The number of subscription updates displayed per page.
entries_per_page = 20


[theme]
# This section is for changing the COLORS used in Amfora.
# These colors only apply if 'color' is enabled above.

@@ -196,6 +218,8 @@ max_pages = 30 # The maximum number of pages the cache will store
# yesno_modal_text
# tofu_modal_bg
# tofu_modal_text
# subscription_modal_bg
# subscription_modal_text

# input_modal_bg
# input_modal_text
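For reference, the settings documented above can be read back through viper with the same keys that config.Init registers defaults for. A minimal sketch, including the "corrected to 1" clamp for workers; the helper name here is illustrative, not Amfora's actual code:

package subscriptions

import (
	"time"

	"github.com/spf13/viper"
)

// readSettings pulls the [subscriptions] values out of the loaded config.
func readSettings() (popup bool, interval time.Duration, workers, perPage int) {
	popup = viper.GetBool("subscriptions.popup")
	interval = time.Duration(viper.GetInt("subscriptions.update_interval")) * time.Second
	workers = viper.GetInt("subscriptions.workers")
	if workers < 1 {
		// Any value below 1 will be corrected to 1, as documented above.
		workers = 1
	}
	perPage = viper.GetInt("subscriptions.entries_per_page")
	return popup, interval, workers, perPage
}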
@@ -8,7 +8,7 @@ import (
)

// Functions to allow theming configuration.
// UI element colors are mapped to a string key, such as "error" or "tab_background"
// UI element colors are mapped to a string key, such as "error" or "tab_bg"
// These are the same keys used in the config file.

var themeMu = sync.RWMutex{}

@@ -26,18 +26,20 @@ var theme = map[string]tcell.Color{
	"btn_bg":   tcell.ColorNavy, // All modal buttons
	"btn_text": tcell.ColorWhite,

	"dl_choice_modal_bg":   tcell.ColorPurple,
	"dl_choice_modal_text": tcell.ColorWhite,
	"dl_modal_bg":          tcell.Color130, // xterm:DarkOrange3, #af5f00
	"dl_modal_text":        tcell.ColorWhite,
	"info_modal_bg":        tcell.ColorGray,
	"info_modal_text":      tcell.ColorWhite,
	"error_modal_bg":       tcell.ColorMaroon,
	"error_modal_text":     tcell.ColorWhite,
	"yesno_modal_bg":       tcell.ColorPurple,
	"yesno_modal_text":     tcell.ColorWhite,
	"tofu_modal_bg":        tcell.ColorMaroon,
	"tofu_modal_text":      tcell.ColorWhite,
	"dl_choice_modal_bg":      tcell.ColorPurple,
	"dl_choice_modal_text":    tcell.ColorWhite,
	"dl_modal_bg":             tcell.Color130, // xterm:DarkOrange3, #af5f00
	"dl_modal_text":           tcell.ColorWhite,
	"info_modal_bg":           tcell.ColorGray,
	"info_modal_text":         tcell.ColorWhite,
	"error_modal_bg":          tcell.ColorMaroon,
	"error_modal_text":        tcell.ColorWhite,
	"yesno_modal_bg":          tcell.ColorPurple,
	"yesno_modal_text":        tcell.ColorWhite,
	"tofu_modal_bg":           tcell.ColorMaroon,
	"tofu_modal_text":         tcell.ColorWhite,
	"subscription_modal_bg":   tcell.Color61, // xterm:SlateBlue3, #5f5faf
	"subscription_modal_text": tcell.ColorWhite,

	"input_modal_bg":   tcell.ColorGreen,
	"input_modal_text": tcell.ColorWhite,

@@ -64,8 +66,8 @@ var theme = map[string]tcell.Color{

func SetColor(key string, color tcell.Color) {
	themeMu.Lock()
	defer themeMu.Unlock()
	theme[key] = color
	themeMu.Unlock()
}

// GetColor will return tcell.ColorBlack if there is no color for the provided key.
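Since the comment above notes that these map keys match the config file's [theme] keys, a caller can override one of the new subscription colors through this API. A small sketch, assuming GetColor has the signature GetColor(key string) tcell.Color implied by its doc comment:

package main

import (
	"github.com/gdamore/tcell"
	"github.com/makeworld-the-better-one/amfora/config"
)

func main() {
	// Override the subscription modal background, then read it back.
	// "#5f5faf" is the same SlateBlue3 value used as the default above.
	config.SetColor("subscription_modal_bg", tcell.GetColor("#5f5faf"))
	_ = config.GetColor("subscription_modal_bg") // tcell.ColorBlack for unknown keys
}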
@@ -135,6 +135,28 @@ max_pages = 30 # The maximum number of pages the cache will store
# Note that HTTP and HTTPS are treated as separate protocols here.


[subscriptions]
# For tracking feeds and pages

# Whether a pop-up appears when viewing a potential feed
popup = true

# How often to check for updates to subscriptions in the background, in seconds.
# Set it to 0 to disable this feature. You can still update individual feeds
# manually, or restart the browser.
#
# Note Amfora will check for updates on browser start no matter what this setting is.
update_interval = 1800 # 30 mins

# How many subscriptions can be checked at the same time when updating.
# If you have many subscriptions you may want to increase this for faster
# update times. Any value below 1 will be corrected to 1.
workers = 3

# The number of subscription updates displayed per page.
entries_per_page = 20


[theme]
# This section is for changing the COLORS used in Amfora.
# These colors only apply if 'color' is enabled above.

@@ -193,6 +215,8 @@ max_pages = 30 # The maximum number of pages the cache will store
# yesno_modal_text
# tofu_modal_bg
# tofu_modal_text
# subscription_modal_bg
# subscription_modal_text

# input_modal_bg
# input_modal_text
@@ -110,16 +110,17 @@ func openBkmkModal(name string, exists bool, favicon string) (string, int) {

// Bookmarks displays the bookmarks page on the current tab.
func Bookmarks(t *tab) {
	bkmkPageRaw := "# Bookmarks\r\n\r\n"

	// Gather bookmarks
	rawContent := "# Bookmarks\r\n\r\n"
	m, keys := bookmarks.All()
	for i := range keys {
		rawContent += fmt.Sprintf("=> %s %s\r\n", keys[i], m[keys[i]])
		bkmkPageRaw += fmt.Sprintf("=> %s %s\r\n", keys[i], m[keys[i]])
	}
	// Render and display
	content, links := renderer.RenderGemini(rawContent, textWidth(), leftMargin(), false)
	content, links := renderer.RenderGemini(bkmkPageRaw, textWidth(), leftMargin(), false)
	page := structs.Page{
		Raw:     rawContent,
		Raw:     bkmkPageRaw,
		Content: content,
		Links:   links,
		URL:     "about:bookmarks",
@@ -207,6 +207,7 @@ func Init() {
	})

	// Render the default new tab content ONCE and store it for later
	// This code is repeated in Reload()
	newTabContent := getNewTabContent()
	renderedNewTabContent, newTabLinks := renderer.RenderGemini(newTabContent, textWidth(), leftMargin(), false)
	newTabPage = structs.Page{

@@ -292,6 +293,13 @@ func Init() {
				Info("The current page has no content, so it couldn't be downloaded.")
			}
			return nil
		case tcell.KeyCtrlA:
			Subscriptions(tabs[curTab], "about:subscriptions")
			tabs[curTab].addToHistory("about:subscriptions")
			return nil
		case tcell.KeyCtrlX:
			go addSubscription()
			return nil
		case tcell.KeyRune:
			// Regular key was sent
			switch string(event.Rune()) {

@@ -568,20 +576,11 @@ func Reload() {
// URL loads and handles the provided URL for the current tab.
// It should be an absolute URL.
func URL(u string) {
	// Some code is copied in followLink()

	if u == "about:bookmarks" { //nolint:goconst
		Bookmarks(tabs[curTab])
		tabs[curTab].addToHistory("about:bookmarks")
		return
	}
	if u == "about:newtab" {
		temp := newTabPage // Copy
		setPage(tabs[curTab], &temp)
		return
	}
	t := tabs[curTab]
	if strings.HasPrefix(u, "about:") {
		Error("Error", "Not a valid 'about:' URL.")
		if final, ok := handleAbout(t, u); ok {
			t.addToHistory(final)
		}
		return
	}

@@ -589,7 +588,7 @@ func URL(u string) {
		// Assume it's a Gemini URL
		u = "gemini://" + u
	}
	go goURL(tabs[curTab], u)
	go goURL(t, u)
}

func NumTabs() int {
display/handlers.go (new file, 522 lines)
@ -0,0 +1,522 @@
|
||||
package display
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"io"
|
||||
"mime"
|
||||
"net"
|
||||
"net/url"
|
||||
"os/exec"
|
||||
"path"
|
||||
"strings"
|
||||
|
||||
"github.com/makeworld-the-better-one/amfora/cache"
|
||||
"github.com/makeworld-the-better-one/amfora/client"
|
||||
"github.com/makeworld-the-better-one/amfora/config"
|
||||
"github.com/makeworld-the-better-one/amfora/renderer"
|
||||
"github.com/makeworld-the-better-one/amfora/structs"
|
||||
"github.com/makeworld-the-better-one/amfora/subscriptions"
|
||||
"github.com/makeworld-the-better-one/amfora/webbrowser"
|
||||
"github.com/makeworld-the-better-one/go-gemini"
|
||||
"github.com/makeworld-the-better-one/go-isemoji"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
// handleHTTP is used by handleURL.
|
||||
// It opens HTTP links and displays Info and Error modals.
|
||||
// Returns false if there was an error.
|
||||
func handleHTTP(u string, showInfo bool) bool {
|
||||
if len(config.HTTPCommand) == 1 {
|
||||
// Possibly a non-command
|
||||
|
||||
switch strings.TrimSpace(config.HTTPCommand[0]) {
|
||||
case "", "off":
|
||||
Error("HTTP Error", "Opening HTTP URLs is turned off.")
|
||||
return false
|
||||
case "default":
|
||||
s, err := webbrowser.Open(u)
|
||||
if err != nil {
|
||||
Error("Webbrowser Error", err.Error())
|
||||
return false
|
||||
}
|
||||
if showInfo {
|
||||
Info(s)
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// Custom command
|
||||
var err error = nil
|
||||
if len(config.HTTPCommand) > 1 {
|
||||
err = exec.Command(config.HTTPCommand[0], append(config.HTTPCommand[1:], u)...).Start()
|
||||
} else {
|
||||
err = exec.Command(config.HTTPCommand[0], u).Start()
|
||||
}
|
||||
if err != nil {
|
||||
Error("HTTP Error", "Error executing custom browser command: "+err.Error())
|
||||
return false
|
||||
}
|
||||
|
||||
App.Draw()
|
||||
return true
|
||||
}
|
||||
|
||||
// handleOther is used by handleURL.
|
||||
// It opens links other than Gemini and HTTP and displays Error modals.
|
||||
func handleOther(u string) {
|
||||
// The URL should have a scheme due to a previous call to normalizeURL
|
||||
parsed, _ := url.Parse(u)
|
||||
|
||||
// Search for a handler for the URL scheme
|
||||
handler := strings.TrimSpace(viper.GetString("url-handlers." + parsed.Scheme))
|
||||
if len(handler) == 0 {
|
||||
handler = strings.TrimSpace(viper.GetString("url-handlers.other"))
|
||||
}
|
||||
switch handler {
|
||||
case "", "off":
|
||||
Error("URL Error", "Opening "+parsed.Scheme+" URLs is turned off.")
|
||||
default:
|
||||
// The config has a custom command to execute for URLs
|
||||
fields := strings.Fields(handler)
|
||||
err := exec.Command(fields[0], append(fields[1:], u)...).Start()
|
||||
if err != nil {
|
||||
Error("URL Error", "Error executing custom command: "+err.Error())
|
||||
}
|
||||
}
|
||||
App.Draw()
|
||||
}
|
||||
|
||||
// handleFavicon handles getting and displaying a favicon.
|
||||
// `old` is the previous favicon for the tab.
|
||||
func handleFavicon(t *tab, host, old string) {
|
||||
defer func() {
|
||||
// Update display if needed
|
||||
if t.page.Favicon != old && isValidTab(t) {
|
||||
rewriteTabRow()
|
||||
}
|
||||
}()
|
||||
|
||||
if !viper.GetBool("a-general.emoji_favicons") {
|
||||
// Not enabled
|
||||
return
|
||||
}
|
||||
if t.page.Favicon != "" {
|
||||
return
|
||||
}
|
||||
if host == "" {
|
||||
return
|
||||
}
|
||||
|
||||
fav := cache.GetFavicon(host)
|
||||
if fav == cache.KnownNoFavicon {
|
||||
// It's been cached that this host doesn't have a favicon
|
||||
return
|
||||
}
|
||||
if fav != "" {
|
||||
t.page.Favicon = fav
|
||||
rewriteTabRow()
|
||||
return
|
||||
}
|
||||
|
||||
// No favicon cached
|
||||
res, err := client.Fetch("gemini://" + host + "/favicon.txt")
|
||||
if err != nil {
|
||||
if res != nil {
|
||||
res.Body.Close()
|
||||
}
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
if res.Status != 20 {
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
}
|
||||
if !strings.HasPrefix(res.Meta, "text/") {
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
}
|
||||
// It's a regular plain response
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
_, err = io.CopyN(buf, res.Body, 29+2+1) // 29 is the max emoji length, +2 for CRLF, +1 so that the right size will EOF
|
||||
if err == nil {
|
||||
// Content was too large
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
} else if err != io.EOF {
|
||||
// Some network reading error
|
||||
// No favicon is NOT known, could be a temporary error
|
||||
return
|
||||
}
|
||||
// EOF, which is what we want.
|
||||
emoji := strings.TrimRight(buf.String(), "\r\n")
|
||||
if !isemoji.IsEmoji(emoji) {
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
}
|
||||
// Valid favicon found
|
||||
t.page.Favicon = emoji
|
||||
cache.AddFavicon(host, emoji)
|
||||
}
|
||||
|
||||
// handleAbout can be called to deal with any URLs that start with
|
||||
// 'about:'. It will display errors if the URL is not recognized,
|
||||
// but not display anything if an 'about:' URL is not passed.
|
||||
//
|
||||
// It does not add the displayed page to history.
|
||||
//
|
||||
// It returns the URL displayed, and a bool indicating if the provided
|
||||
// URL could be handled. The string returned will always be empty
|
||||
// if the bool is false.
|
||||
func handleAbout(t *tab, u string) (string, bool) {
|
||||
if !strings.HasPrefix(u, "about:") {
|
||||
return "", false
|
||||
}
|
||||
|
||||
switch u {
|
||||
case "about:bookmarks":
|
||||
Bookmarks(t)
|
||||
return u, true
|
||||
case "about:newtab":
|
||||
temp := newTabPage // Copy
|
||||
setPage(t, &temp)
|
||||
t.applyBottomBar()
|
||||
return u, true
|
||||
}
|
||||
|
||||
if u == "about:subscriptions" || (len(u) > 20 && u[:20] == "about:subscriptions?") {
|
||||
// about:subscriptions?2 views page 2
|
||||
return Subscriptions(t, u), true
|
||||
}
|
||||
if u == "about:manage-subscriptions" || (len(u) > 27 && u[:27] == "about:manage-subscriptions?") {
|
||||
ManageSubscriptions(t, u)
|
||||
// Don't count remove command in history
|
||||
if u == "about:manage-subscriptions" {
|
||||
return u, true
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
Error("Error", "Not a valid 'about:' URL.")
|
||||
return "", false
|
||||
}
|
||||
|
||||
// handleURL displays whatever action is needed for the provided URL,
|
||||
// and applies it to the current tab.
|
||||
// It loads documents, handles errors, brings up a download prompt, etc.
|
||||
//
|
||||
// The string returned is the final URL, if redirects were involved.
|
||||
// In most cases it will be the same as the passed URL.
|
||||
// If there is some error, it will return "".
|
||||
// The second returned item is a bool indicating if page content was displayed.
|
||||
// It returns false for Errors, other protocols, etc.
|
||||
//
|
||||
// The bottomBar is not actually changed in this func, except during loading.
|
||||
// The func that calls this one should apply the bottomBar values if necessary.
|
||||
//
|
||||
// numRedirects is the number of redirects that resulted in the provided URL.
|
||||
// It should typically be 0.
|
||||
func handleURL(t *tab, u string, numRedirects int) (string, bool) {
|
||||
defer App.Draw() // Just in case
|
||||
|
||||
// Save for resetting on error
|
||||
oldLable := t.barLabel
|
||||
oldText := t.barText
|
||||
|
||||
// Custom return function
|
||||
ret := func(s string, b bool) (string, bool) {
|
||||
if !b {
|
||||
// Reset bottomBar if page wasn't loaded
|
||||
t.barLabel = oldLable
|
||||
t.barText = oldText
|
||||
}
|
||||
t.mode = tabModeDone
|
||||
|
||||
go func(p *structs.Page) {
|
||||
if b && t.hasContent() && !subscriptions.IsSubscribed(s) && viper.GetBool("subscriptions.popup") {
|
||||
// The current page might be an untracked feed, and the user wants
|
||||
// to be notified in such cases.
|
||||
|
||||
feed, isFeed := getFeedFromPage(p)
|
||||
if isFeed && isValidTab(t) && t.page == p {
|
||||
// After parsing and track-checking time, the page is still being displayed
|
||||
addFeedDirect(p.URL, feed, false)
|
||||
}
|
||||
}
|
||||
}(t.page)
|
||||
|
||||
return s, b
|
||||
}
|
||||
|
||||
t.barLabel = ""
|
||||
bottomBar.SetLabel("")
|
||||
|
||||
App.SetFocus(t.view)
|
||||
|
||||
if strings.HasPrefix(u, "about:") {
|
||||
return ret(handleAbout(t, u))
|
||||
}
|
||||
|
||||
u = normalizeURL(u)
|
||||
u = cache.Redirect(u)
|
||||
|
||||
parsed, err := url.Parse(u)
|
||||
if err != nil {
|
||||
Error("URL Error", err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
proxy := strings.TrimSpace(viper.GetString("proxies." + parsed.Scheme))
|
||||
usingProxy := false
|
||||
|
||||
proxyHostname, proxyPort, err := net.SplitHostPort(proxy)
|
||||
if err != nil {
|
||||
// Error likely means there's no port in the host
|
||||
proxyHostname = proxy
|
||||
proxyPort = "1965"
|
||||
}
|
||||
|
||||
if strings.HasPrefix(u, "http") {
|
||||
if proxy == "" || proxy == "off" {
|
||||
// No proxy available
|
||||
handleHTTP(u, true)
|
||||
return ret("", false)
|
||||
}
|
||||
usingProxy = true
|
||||
}
|
||||
|
||||
if strings.HasPrefix(u, "file") {
|
||||
page, ok := handleFile(u)
|
||||
if !ok {
|
||||
return ret("", false)
|
||||
}
|
||||
setPage(t, page)
|
||||
return ret(u, true)
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(u, "http") && !strings.HasPrefix(u, "gemini") && !strings.HasPrefix(u, "file") {
|
||||
// Not a Gemini URL
|
||||
if proxy == "" || proxy == "off" {
|
||||
// No proxy available
|
||||
handleOther(u)
|
||||
return ret("", false)
|
||||
}
|
||||
usingProxy = true
|
||||
}
|
||||
|
||||
// Gemini URL, or one with a Gemini proxy available
|
||||
|
||||
// Load page from cache if it exists,
|
||||
// and this isn't a page that was redirected to by the server (indicates dynamic content)
|
||||
if numRedirects == 0 {
|
||||
page, ok := cache.GetPage(u)
|
||||
if ok {
|
||||
setPage(t, page)
|
||||
return ret(u, true)
|
||||
}
|
||||
}
|
||||
// Otherwise download it
|
||||
bottomBar.SetText("Loading...")
|
||||
t.barText = "Loading..." // Save it too, in case the tab switches during loading
|
||||
t.mode = tabModeLoading
|
||||
App.Draw()
|
||||
|
||||
var res *gemini.Response
|
||||
if usingProxy {
|
||||
res, err = client.FetchWithProxy(proxyHostname, proxyPort, u)
|
||||
} else {
|
||||
res, err = client.Fetch(u)
|
||||
}
|
||||
|
||||
// Loading may have taken a while, make sure tab is still valid
|
||||
if !isValidTab(t) {
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
if errors.Is(err, client.ErrTofu) {
|
||||
if usingProxy {
|
||||
// They are using a proxy
|
||||
if Tofu(proxy, client.GetExpiry(proxyHostname, proxyPort)) {
|
||||
// They want to continue anyway
|
||||
client.ResetTofuEntry(proxyHostname, proxyPort, res.Cert)
|
||||
// Response can be used further down, no need to reload
|
||||
} else {
|
||||
// They don't want to continue
|
||||
return ret("", false)
|
||||
}
|
||||
} else {
|
||||
if Tofu(parsed.Host, client.GetExpiry(parsed.Hostname(), parsed.Port())) {
|
||||
// They want to continue anyway
|
||||
client.ResetTofuEntry(parsed.Hostname(), parsed.Port(), res.Cert)
|
||||
// Response can be used further down, no need to reload
|
||||
} else {
|
||||
// They don't want to continue
|
||||
return ret("", false)
|
||||
}
|
||||
}
|
||||
} else if err != nil {
|
||||
Error("URL Fetch Error", err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
if renderer.CanDisplay(res) {
|
||||
page, err := renderer.MakePage(u, res, textWidth(), leftMargin(), usingProxy)
|
||||
// Rendering may have taken a while, make sure tab is still valid
|
||||
if !isValidTab(t) {
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
var res2 *gemini.Response
|
||||
var dlErr error
|
||||
|
||||
if errors.Is(err, renderer.ErrTooLarge) {
|
||||
// Make new request for downloading purposes
|
||||
if usingProxy {
|
||||
res2, dlErr = client.DownloadWithProxy(proxyHostname, proxyPort, u)
|
||||
} else {
|
||||
res2, dlErr = client.Download(u)
|
||||
}
|
||||
if dlErr != nil && !errors.Is(dlErr, client.ErrTofu) {
|
||||
Error("URL Fetch Error", err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
go dlChoice("That page is too large. What would you like to do?", u, res2)
|
||||
return ret("", false)
|
||||
}
|
||||
if errors.Is(err, renderer.ErrTimedOut) {
|
||||
// Make new request for downloading purposes
|
||||
if usingProxy {
|
||||
res2, dlErr = client.DownloadWithProxy(proxyHostname, proxyPort, u)
|
||||
} else {
|
||||
res2, dlErr = client.Download(u)
|
||||
}
|
||||
if dlErr != nil && !errors.Is(dlErr, client.ErrTofu) {
|
||||
Error("URL Fetch Error", err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
go dlChoice("Loading that page timed out. What would you like to do?", u, res2)
|
||||
return ret("", false)
|
||||
}
|
||||
if err != nil {
|
||||
Error("Page Error", "Issuing creating page: "+err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
page.Width = termW
|
||||
|
||||
if !client.HasClientCert(parsed.Host) {
|
||||
// Don't cache pages with client certs
|
||||
go cache.AddPage(page)
|
||||
}
|
||||
|
||||
setPage(t, page)
|
||||
return ret(u, true)
|
||||
}
|
||||
// Not displayable
|
||||
// Could be a non 20 (or 21) status code, or a different kind of document
|
||||
|
||||
// Handle each status code
|
||||
switch res.Status {
|
||||
case 10, 11:
|
||||
userInput, ok := Input(res.Meta)
|
||||
if ok {
|
||||
// Make another request with the query string added
|
||||
// + chars are replaced because PathEscape doesn't do that
|
||||
parsed.RawQuery = gemini.QueryEscape(userInput)
|
||||
if len(parsed.String()) > gemini.URLMaxLength {
|
||||
Error("Input Error", "URL for that input would be too long.")
|
||||
return ret("", false)
|
||||
}
|
||||
return ret(handleURL(t, parsed.String(), 0))
|
||||
}
|
||||
return ret("", false)
|
||||
case 30, 31:
|
||||
parsedMeta, err := url.Parse(res.Meta)
|
||||
if err != nil {
|
||||
Error("Redirect Error", "Invalid URL: "+err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
redir := parsed.ResolveReference(parsedMeta).String()
|
||||
// Prompt before redirecting to non-Gemini protocol
|
||||
redirect := false
|
||||
if !strings.HasPrefix(redir, "gemini") {
|
||||
if YesNo("Follow redirect to non-Gemini URL?\n" + redir) {
|
||||
redirect = true
|
||||
} else {
|
||||
return ret("", false)
|
||||
}
|
||||
}
|
||||
// Prompt before redirecting
|
||||
autoRedirect := viper.GetBool("a-general.auto_redirect")
|
||||
if redirect || (autoRedirect && numRedirects < 5) || YesNo("Follow redirect?\n"+redir) {
|
||||
if res.Status == gemini.StatusRedirectPermanent {
|
||||
go cache.AddRedir(u, redir)
|
||||
}
|
||||
return ret(handleURL(t, redir, numRedirects+1))
|
||||
}
|
||||
return ret("", false)
|
||||
case 40:
|
||||
Error("Temporary Failure", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 41:
|
||||
Error("Server Unavailable", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 42:
|
||||
Error("CGI Error", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 43:
|
||||
Error("Proxy Failure", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 44:
|
||||
Error("Slow Down", "You should wait "+escapeMeta(res.Meta)+" seconds before making another request.")
|
||||
return ret("", false)
|
||||
case 50:
|
||||
Error("Permanent Failure", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 51:
|
||||
Error("Not Found", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 52:
|
||||
Error("Gone", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 53:
|
||||
Error("Proxy Request Refused", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 59:
|
||||
Error("Bad Request", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 60:
|
||||
Error("Client Certificate Required", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 61:
|
||||
Error("Certificate Not Authorised", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 62:
|
||||
Error("Certificate Not Valid", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
// Status code 20, but not a document that can be displayed
|
||||
|
||||
// First see if it's a feed, and ask the user about adding it if it is
|
||||
filename := path.Base(parsed.Path)
|
||||
mediatype, _, _ := mime.ParseMediaType(res.Meta)
|
||||
feed, ok := subscriptions.GetFeed(mediatype, filename, res.Body)
|
||||
if ok {
|
||||
go func() {
|
||||
added := addFeedDirect(u, feed, subscriptions.IsSubscribed(u))
|
||||
if !added {
|
||||
// Otherwise offer download choices
|
||||
go dlChoice("That file could not be displayed. What would you like to do?", u, res)
|
||||
}
|
||||
}()
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
// Otherwise offer download choices
|
||||
go dlChoice("That file could not be displayed. What would you like to do?", u, res)
|
||||
return ret("", false)
|
||||
}
|
@@ -42,6 +42,8 @@ Ctrl-R, R|Reload a page, discarding the cached version.
Ctrl-B|View bookmarks
Ctrl-D|Add, change, or remove a bookmark for the current page.
Ctrl-S|Save the current page to your downloads.
Ctrl-A|View subscriptions
Ctrl-X|Add or update a subscription
q, Ctrl-Q|Quit
Ctrl-C|Hard quit. This can be used when in the middle of downloading,
|for example.
@@ -18,6 +18,7 @@ You can customize this page by creating a gemtext file called newtab.gmi, in Amf
Happy browsing!

=> about:bookmarks Bookmarks
=> about:subscriptions Subscriptions

=> //gemini.circumlunar.space Project Gemini
=> https://github.com/makeworld-the-better-one/amfora Amfora homepage [HTTPS]
@ -1,24 +1,14 @@
|
||||
package display
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/url"
|
||||
"os/exec"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/makeworld-the-better-one/amfora/cache"
|
||||
"github.com/makeworld-the-better-one/amfora/client"
|
||||
"github.com/makeworld-the-better-one/amfora/config"
|
||||
"github.com/makeworld-the-better-one/amfora/renderer"
|
||||
"github.com/makeworld-the-better-one/amfora/structs"
|
||||
"github.com/makeworld-the-better-one/amfora/webbrowser"
|
||||
"github.com/makeworld-the-better-one/go-gemini"
|
||||
"github.com/makeworld-the-better-one/go-isemoji"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
@ -29,15 +19,10 @@ import (
|
||||
// Not when a URL is opened on a new tab for the first time.
|
||||
// It will handle setting the bottomBar.
|
||||
func followLink(t *tab, prev, next string) {
|
||||
|
||||
// Copied from URL()
|
||||
if next == "about:bookmarks" {
|
||||
Bookmarks(t)
|
||||
t.addToHistory("about:bookmarks")
|
||||
return
|
||||
}
|
||||
if strings.HasPrefix(next, "about:") {
|
||||
Error("Error", "Not a valid 'about:' URL for linking")
|
||||
if final, ok := handleAbout(t, next); ok {
|
||||
t.addToHistory(final)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@ -144,146 +129,6 @@ func setPage(t *tab, p *structs.Page) {
|
||||
t.barText = p.URL
|
||||
}
|
||||
|
||||
// handleHTTP is used by handleURL.
|
||||
// It opens HTTP links and displays Info and Error modals.
|
||||
// Returns false if there was an error.
|
||||
func handleHTTP(u string, showInfo bool) bool {
|
||||
if len(config.HTTPCommand) == 1 {
|
||||
// Possibly a non-command
|
||||
|
||||
switch strings.TrimSpace(config.HTTPCommand[0]) {
|
||||
case "", "off":
|
||||
Error("HTTP Error", "Opening HTTP URLs is turned off.")
|
||||
return false
|
||||
case "default":
|
||||
s, err := webbrowser.Open(u)
|
||||
if err != nil {
|
||||
Error("Webbrowser Error", err.Error())
|
||||
return false
|
||||
}
|
||||
if showInfo {
|
||||
Info(s)
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// Custom command
|
||||
var err error = nil
|
||||
if len(config.HTTPCommand) > 1 {
|
||||
err = exec.Command(config.HTTPCommand[0], append(config.HTTPCommand[1:], u)...).Start()
|
||||
} else {
|
||||
err = exec.Command(config.HTTPCommand[0], u).Start()
|
||||
}
|
||||
if err != nil {
|
||||
Error("HTTP Error", "Error executing custom browser command: "+err.Error())
|
||||
return false
|
||||
}
|
||||
|
||||
App.Draw()
|
||||
return true
|
||||
}
|
||||
|
||||
// handleOther is used by handleURL.
|
||||
// It opens links other than Gemini and HTTP and displays Error modals.
|
||||
func handleOther(u string) {
|
||||
// The URL should have a scheme due to a previous call to normalizeURL
|
||||
parsed, _ := url.Parse(u)
|
||||
|
||||
// Search for a handler for the URL scheme
|
||||
handler := strings.TrimSpace(viper.GetString("url-handlers." + parsed.Scheme))
|
||||
if len(handler) == 0 {
|
||||
handler = strings.TrimSpace(viper.GetString("url-handlers.other"))
|
||||
}
|
||||
switch handler {
|
||||
case "", "off":
|
||||
Error("URL Error", "Opening "+parsed.Scheme+" URLs is turned off.")
|
||||
default:
|
||||
// The config has a custom command to execute for URLs
|
||||
fields := strings.Fields(handler)
|
||||
err := exec.Command(fields[0], append(fields[1:], u)...).Start()
|
||||
if err != nil {
|
||||
Error("URL Error", "Error executing custom command: "+err.Error())
|
||||
}
|
||||
}
|
||||
App.Draw()
|
||||
}
|
||||
|
||||
// handleFavicon handles getting and displaying a favicon.
|
||||
// `old` is the previous favicon for the tab.
|
||||
func handleFavicon(t *tab, host, old string) {
|
||||
defer func() {
|
||||
// Update display if needed
|
||||
if t.page.Favicon != old && isValidTab(t) {
|
||||
rewriteTabRow()
|
||||
}
|
||||
}()
|
||||
|
||||
if !viper.GetBool("a-general.emoji_favicons") {
|
||||
// Not enabled
|
||||
return
|
||||
}
|
||||
if t.page.Favicon != "" {
|
||||
return
|
||||
}
|
||||
if host == "" {
|
||||
return
|
||||
}
|
||||
|
||||
fav := cache.GetFavicon(host)
|
||||
if fav == cache.KnownNoFavicon {
|
||||
// It's been cached that this host doesn't have a favicon
|
||||
return
|
||||
}
|
||||
if fav != "" {
|
||||
t.page.Favicon = fav
|
||||
rewriteTabRow()
|
||||
return
|
||||
}
|
||||
|
||||
// No favicon cached
|
||||
res, err := client.Fetch("gemini://" + host + "/favicon.txt")
|
||||
if err != nil {
|
||||
if res != nil {
|
||||
res.Body.Close()
|
||||
}
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
if res.Status != 20 {
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
}
|
||||
if !strings.HasPrefix(res.Meta, "text/") {
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
}
|
||||
// It's a regular plain response
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
_, err = io.CopyN(buf, res.Body, 29+2+1) // 29 is the max emoji length, +2 for CRLF, +1 so that the right size will EOF
|
||||
if err == nil {
|
||||
// Content was too large
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
} else if err != io.EOF {
|
||||
// Some network reading error
|
||||
// No favicon is NOT known, could be a temporary error
|
||||
return
|
||||
}
|
||||
// EOF, which is what we want.
|
||||
emoji := strings.TrimRight(buf.String(), "\r\n")
|
||||
if !isemoji.IsEmoji(emoji) {
|
||||
cache.AddFavicon(host, cache.KnownNoFavicon)
|
||||
return
|
||||
}
|
||||
// Valid favicon found
|
||||
t.page.Favicon = emoji
|
||||
cache.AddFavicon(host, emoji)
|
||||
}
|
||||
|
||||
// goURL is like handleURL, but takes care of history and the bottomBar.
|
||||
// It should be preferred over handleURL in most cases.
|
||||
// It has no return values to be processed.
|
||||
@ -300,293 +145,6 @@ func goURL(t *tab, u string) {
|
||||
}
|
||||
}
|
||||
|
||||
// handleURL displays whatever action is needed for the provided URL,
|
||||
// and applies it to the current tab.
|
||||
// It loads documents, handles errors, brings up a download prompt, etc.
|
||||
//
|
||||
// The string returned is the final URL, if redirects were involved.
|
||||
// In most cases it will be the same as the passed URL.
|
||||
// If there is some error, it will return "".
|
||||
// The second returned item is a bool indicating if page content was displayed.
|
||||
// It returns false for Errors, other protocols, etc.
|
||||
//
|
||||
// The bottomBar is not actually changed in this func, except during loading.
|
||||
// The func that calls this one should apply the bottomBar values if necessary.
|
||||
//
|
||||
// numRedirects is the number of redirects that resulted in the provided URL.
|
||||
// It should typically be 0.
|
||||
func handleURL(t *tab, u string, numRedirects int) (string, bool) {
|
||||
defer App.Draw() // Just in case
|
||||
|
||||
// Save for resetting on error
|
||||
oldLable := t.barLabel
|
||||
oldText := t.barText
|
||||
|
||||
// Custom return function
|
||||
ret := func(s string, b bool) (string, bool) {
|
||||
if !b {
|
||||
// Reset bottomBar if page wasn't loaded
|
||||
t.barLabel = oldLable
|
||||
t.barText = oldText
|
||||
}
|
||||
t.mode = tabModeDone
|
||||
return s, b
|
||||
}
|
||||
|
||||
t.barLabel = ""
|
||||
bottomBar.SetLabel("")
|
||||
|
||||
App.SetFocus(t.view)
|
||||
|
||||
// To allow linking to the bookmarks page, and history browsing
|
||||
if u == "about:bookmarks" {
|
||||
Bookmarks(t)
|
||||
return ret("about:bookmarks", true)
|
||||
}
|
||||
|
||||
u = normalizeURL(u)
|
||||
u = cache.Redirect(u)
|
||||
|
||||
parsed, err := url.Parse(u)
|
||||
if err != nil {
|
||||
Error("URL Error", err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
proxy := strings.TrimSpace(viper.GetString("proxies." + parsed.Scheme))
|
||||
usingProxy := false
|
||||
|
||||
proxyHostname, proxyPort, err := net.SplitHostPort(proxy)
|
||||
if err != nil {
|
||||
// Error likely means there's no port in the host
|
||||
proxyHostname = proxy
|
||||
proxyPort = "1965"
|
||||
}
|
||||
|
||||
if strings.HasPrefix(u, "http") {
|
||||
if proxy == "" || proxy == "off" {
|
||||
// No proxy available
|
||||
handleHTTP(u, true)
|
||||
return ret("", false)
|
||||
}
|
||||
usingProxy = true
|
||||
}
|
||||
|
||||
if strings.HasPrefix(u, "file") {
|
||||
page, ok := handleFile(u)
|
||||
if !ok {
|
||||
return ret("", false)
|
||||
}
|
||||
setPage(t, page)
|
||||
return ret(u, true)
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(u, "http") && !strings.HasPrefix(u, "gemini") && !strings.HasPrefix(u, "file") {
|
||||
// Not a Gemini URL
|
||||
if proxy == "" || proxy == "off" {
|
||||
// No proxy available
|
||||
handleOther(u)
|
||||
return ret("", false)
|
||||
}
|
||||
usingProxy = true
|
||||
}
|
||||
|
||||
// Gemini URL, or one with a Gemini proxy available
|
||||
|
||||
// Load page from cache if it exists,
|
||||
// and this isn't a page that was redirected to by the server (indicates dynamic content)
|
||||
if numRedirects == 0 {
|
||||
page, ok := cache.GetPage(u)
|
||||
if ok {
|
||||
setPage(t, page)
|
||||
return ret(u, true)
|
||||
}
|
||||
}
|
||||
// Otherwise download it
|
||||
bottomBar.SetText("Loading...")
|
||||
t.barText = "Loading..." // Save it too, in case the tab switches during loading
|
||||
t.mode = tabModeLoading
|
||||
App.Draw()
|
||||
|
||||
var res *gemini.Response
|
||||
if usingProxy {
|
||||
res, err = client.FetchWithProxy(proxyHostname, proxyPort, u)
|
||||
} else {
|
||||
res, err = client.Fetch(u)
|
||||
}
|
||||
|
||||
// Loading may have taken a while, make sure tab is still valid
|
||||
if !isValidTab(t) {
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
if errors.Is(err, client.ErrTofu) {
|
||||
if usingProxy {
|
||||
// They are using a proxy
|
||||
if Tofu(proxy, client.GetExpiry(proxyHostname, proxyPort)) {
|
||||
// They want to continue anyway
|
||||
client.ResetTofuEntry(proxyHostname, proxyPort, res.Cert)
|
||||
// Response can be used further down, no need to reload
|
||||
} else {
|
||||
// They don't want to continue
|
||||
return ret("", false)
|
||||
}
|
||||
} else {
|
||||
if Tofu(parsed.Host, client.GetExpiry(parsed.Hostname(), parsed.Port())) {
|
||||
// They want to continue anyway
|
||||
client.ResetTofuEntry(parsed.Hostname(), parsed.Port(), res.Cert)
|
||||
// Response can be used further down, no need to reload
|
||||
} else {
|
||||
// They don't want to continue
|
||||
return ret("", false)
|
||||
}
|
||||
}
|
||||
} else if err != nil {
|
||||
Error("URL Fetch Error", err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
if renderer.CanDisplay(res) {
|
||||
page, err := renderer.MakePage(u, res, textWidth(), leftMargin(), usingProxy)
|
||||
// Rendering may have taken a while, make sure tab is still valid
|
||||
if !isValidTab(t) {
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
var res2 *gemini.Response
|
||||
var dlErr error
|
||||
|
||||
if errors.Is(err, renderer.ErrTooLarge) {
|
||||
// Make new request for downloading purposes
|
||||
if usingProxy {
|
||||
res2, dlErr = client.DownloadWithProxy(proxyHostname, proxyPort, u)
|
||||
} else {
|
||||
res2, dlErr = client.Download(u)
|
||||
}
|
||||
if dlErr != nil && !errors.Is(dlErr, client.ErrTofu) {
|
||||
Error("URL Fetch Error", err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
go dlChoice("That page is too large. What would you like to do?", u, res2)
|
||||
return ret("", false)
|
||||
}
|
||||
if errors.Is(err, renderer.ErrTimedOut) {
|
||||
// Make new request for downloading purposes
|
||||
if usingProxy {
|
||||
res2, dlErr = client.DownloadWithProxy(proxyHostname, proxyPort, u)
|
||||
} else {
|
||||
res2, dlErr = client.Download(u)
|
||||
}
|
||||
if dlErr != nil && !errors.Is(dlErr, client.ErrTofu) {
|
||||
Error("URL Fetch Error", err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
go dlChoice("Loading that page timed out. What would you like to do?", u, res2)
|
||||
return ret("", false)
|
||||
}
|
||||
if err != nil {
|
||||
Error("Page Error", "Issuing creating page: "+err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
page.Width = termW
|
||||
|
||||
if !client.HasClientCert(parsed.Host) {
|
||||
// Don't cache pages with client certs
|
||||
go cache.AddPage(page)
|
||||
}
|
||||
|
||||
setPage(t, page)
|
||||
return ret(u, true)
|
||||
}
|
||||
// Not displayable
|
||||
// Could be a non 20 (or 21) status code, or a different kind of document
|
||||
|
||||
// Handle each status code
|
||||
switch res.Status {
|
||||
case 10, 11:
|
||||
userInput, ok := Input(res.Meta)
|
||||
if ok {
|
||||
// Make another request with the query string added
|
||||
// + chars are replaced because PathEscape doesn't do that
|
||||
parsed.RawQuery = gemini.QueryEscape(userInput)
|
||||
if len(parsed.String()) > gemini.URLMaxLength {
|
||||
Error("Input Error", "URL for that input would be too long.")
|
||||
return ret("", false)
|
||||
}
|
||||
return ret(handleURL(t, parsed.String(), 0))
|
||||
}
|
||||
return ret("", false)
|
||||
case 30, 31:
|
||||
parsedMeta, err := url.Parse(res.Meta)
|
||||
if err != nil {
|
||||
Error("Redirect Error", "Invalid URL: "+err.Error())
|
||||
return ret("", false)
|
||||
}
|
||||
redir := parsed.ResolveReference(parsedMeta).String()
|
||||
// Prompt before redirecting to non-Gemini protocol
|
||||
redirect := false
|
||||
if !strings.HasPrefix(redir, "gemini") {
|
||||
if YesNo("Follow redirect to non-Gemini URL?\n" + redir) {
|
||||
redirect = true
|
||||
} else {
|
||||
return ret("", false)
|
||||
}
|
||||
}
|
||||
// Prompt before redirecting
|
||||
autoRedirect := viper.GetBool("a-general.auto_redirect")
|
||||
if redirect || (autoRedirect && numRedirects < 5) || YesNo("Follow redirect?\n"+redir) {
|
||||
if res.Status == gemini.StatusRedirectPermanent {
|
||||
go cache.AddRedir(u, redir)
|
||||
}
|
||||
return ret(handleURL(t, redir, numRedirects+1))
|
||||
}
|
||||
return ret("", false)
|
||||
case 40:
|
||||
Error("Temporary Failure", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 41:
|
||||
Error("Server Unavailable", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 42:
|
||||
Error("CGI Error", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 43:
|
||||
Error("Proxy Failure", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 44:
|
||||
Error("Slow Down", "You should wait "+escapeMeta(res.Meta)+" seconds before making another request.")
|
||||
return ret("", false)
|
||||
case 50:
|
||||
Error("Permanent Failure", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 51:
|
||||
Error("Not Found", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 52:
|
||||
Error("Gone", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 53:
|
||||
Error("Proxy Request Refused", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 59:
|
||||
Error("Bad Request", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 60:
|
||||
Error("Client Certificate Required", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 61:
|
||||
Error("Certificate Not Authorised", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
case 62:
|
||||
Error("Certificate Not Valid", escapeMeta(res.Meta))
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
// Status code 20, but not a document that can be displayed
|
||||
go dlChoice("That file could not be displayed. What would you like to do?", u, res)
|
||||
return ret("", false)
|
||||
}
|
||||
|
||||
// rewriteTabRow clears the tabRow and writes all the tabs number/favicons into it.
|
||||
func rewriteTabRow() {
|
||||
tabRow.Clear()
|
||||
|
display/subscriptions.go (new file, 328 lines)
@ -0,0 +1,328 @@
|
||||
package display
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"path"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gdamore/tcell"
|
||||
"github.com/makeworld-the-better-one/amfora/cache"
|
||||
"github.com/makeworld-the-better-one/amfora/config"
|
||||
"github.com/makeworld-the-better-one/amfora/renderer"
|
||||
"github.com/makeworld-the-better-one/amfora/structs"
|
||||
"github.com/makeworld-the-better-one/amfora/subscriptions"
|
||||
"github.com/makeworld-the-better-one/go-gemini"
|
||||
"github.com/mmcdole/gofeed"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
// Map page number (zero-indexed) to the time it was made at.
|
||||
// This allows for caching the pages until there's an update.
|
||||
var subscriptionPageUpdated = make(map[int]time.Time)
|
||||
|
||||
// toLocalDay truncates the provided time to a date only,
|
||||
// but converts to the local time first.
|
||||
func toLocalDay(t time.Time) time.Time {
|
||||
t = t.Local()
|
||||
return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
|
||||
}
|
||||
|
||||
// Subscriptions displays the subscriptions page on the current tab.
|
||||
func Subscriptions(t *tab, u string) string {
|
||||
pageN := 0 // Pages are zero-indexed internally
|
||||
|
||||
// Correct URL if query string exists
|
||||
// The only valid query string is an int above 1.
|
||||
// Anything "redirects" to the first page, with no query string.
|
||||
// This is done over just serving the first page content for
|
||||
// invalid query strings so that there won't be duplicate caches.
|
||||
correctURL := func(u2 string) string {
|
||||
if len(u2) > 20 && u2[:20] == "about:subscriptions?" {
|
||||
query, err := gemini.QueryUnescape(u2[20:])
|
||||
if err != nil {
|
||||
return "about:subscriptions"
|
||||
}
|
||||
// Valid query string
|
||||
i, err := strconv.Atoi(query)
|
||||
if err != nil {
|
||||
// Not an int
|
||||
return "about:subscriptions"
|
||||
}
|
||||
if i < 2 {
|
||||
return "about:subscriptions"
|
||||
}
|
||||
// Valid int above 1
|
||||
pageN = i - 1 // Pages are zero-indexed internally
|
||||
return u2
|
||||
}
|
||||
return u2
|
||||
}
|
||||
u = correctURL(u)
|
||||
|
||||
// Retrieve cached version if there hasn't been any updates
|
||||
p, ok := cache.GetPage(u)
if subscriptionPageUpdated[pageN].After(subscriptions.LastUpdated) && ok {
setPage(t, p)
t.applyBottomBar()
return u
}

pe := subscriptions.GetPageEntries()

// Figure out where the entries for this page start, if at all.
epp := viper.GetInt("subscriptions.entries_per_page")
if epp <= 0 {
epp = 1
}
start := pageN * epp // Index of the first page entry to be displayed
end := start + epp
if end > len(pe.Entries) {
end = len(pe.Entries)
}

var rawPage string
if pageN == 0 {
rawPage = "# Subscriptions\n\n" + rawPage
} else {
rawPage = fmt.Sprintf("# Subscriptions (page %d)\n\n", pageN+1) + rawPage
}

if start > len(pe.Entries)-1 && len(pe.Entries) != 0 {
// The page is out of range, doesn't exist
rawPage += "This page does not exist.\n\n=> about:subscriptions Subscriptions\n"
} else {
// Render page

rawPage += "You can use Ctrl-X to subscribe to a page, or to an Atom/RSS/JSON feed. See the online wiki for more.\n" +
"If you just opened Amfora then updates may appear incrementally. Reload the page to see them.\n\n" +
"=> about:manage-subscriptions Manage subscriptions\n\n"

// curDay represents what day of posts the loop is on.
// It only goes backwards in time.
// Its initial setting means:
// Only display posts dated up to 26 hours into the future, nothing further ahead than that.
//
// 26 hours was chosen because it is the largest timezone difference
// currently in the world. Posts may be dated in the future
// due to software bugs, where the local user's date is used, but
// the UTC timezone is specified. Gemfeed does this at the time of
// writing, but will not after #3 gets merged on its repo. Still,
// the older version will be used for a while.
curDay := toLocalDay(time.Now()).Add(26 * time.Hour)

for _, entry := range pe.Entries[start:end] { // From new to old
// Convert to local time, remove sub-day info
pub := toLocalDay(entry.Published)

if pub.Before(curDay) {
// This post is on a new day, add a day header
curDay = pub
rawPage += fmt.Sprintf("\n## %s\n\n", curDay.Format("Jan 02, 2006"))
}
if entry.Title == "" || entry.Title == "/" {
// Just put author/title
// Mainly used for when you're tracking the root domain of a site
rawPage += fmt.Sprintf("=>%s %s\n", entry.URL, entry.Prefix)
} else {
// Include title and dash
rawPage += fmt.Sprintf("=>%s %s - %s\n", entry.URL, entry.Prefix, entry.Title)
}
}

if pageN == 0 && len(pe.Entries) > epp {
// First page, and there's more than can fit
rawPage += "\n\n=> about:subscriptions?2 Next Page\n"
} else if pageN > 0 {
// A later page
rawPage += fmt.Sprintf(
"\n\n=> about:subscriptions?%d Previous Page\n",
pageN, // pageN is zero-indexed but the query string is one-indexed
)
if end < len(pe.Entries) {
// There's more
rawPage += fmt.Sprintf("=> about:subscriptions?%d Next Page\n", pageN+2)
}
}
}

content, links := renderer.RenderGemini(rawPage, textWidth(), leftMargin(), false)
page := structs.Page{
Raw: rawPage,
Content: content,
Links: links,
URL: u,
Width: termW,
Mediatype: structs.TextGemini,
}
go cache.AddPage(&page)
setPage(t, &page)
t.applyBottomBar()

subscriptionPageUpdated[pageN] = time.Now()

return u
}

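For reference, the "subscriptions.*" keys read via viper in this code (entries_per_page above; update_interval and workers in subscriptions/subscriptions.go further down) would be set in Amfora's TOML config roughly as follows. This is only a sketch inferred from the viper calls; the values are made-up examples, not the project's documented defaults:

[subscriptions]
# Number of entries shown per subscriptions page; values <= 0 fall back to 1
entries_per_page = 20
# Seconds between automatic updates; <= 0 disables periodic updates
# (a single update still runs at startup)
update_interval = 1800
# Number of worker goroutines used by updateAll; values < 1 fall back to 1
workers = 3
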
// ManageSubscriptions displays the subscription managing page in
// the current tab. `u` is the URL entered by the user.
func ManageSubscriptions(t *tab, u string) {
if len(u) > 27 && u[:27] == "about:manage-subscriptions?" {
// There's a query string, aka a URL to unsubscribe from
manageSubscriptionQuery(t, u)
return
}

rawPage := "# Manage Subscriptions\n\n" +
"Below is a list of URLs you are subscribed to, both feeds and pages. " +
"Navigate to the link to unsubscribe from that feed or page.\n\n"

urls := subscriptions.AllURLS()
sort.Strings(urls)

for _, u2 := range urls {
rawPage += fmt.Sprintf(
"=>%s %s\n",
"about:manage-subscriptions?"+gemini.QueryEscape(u2),
u2,
)
}

content, links := renderer.RenderGemini(rawPage, textWidth(), leftMargin(), false)
page := structs.Page{
Raw: rawPage,
Content: content,
Links: links,
URL: "about:manage-subscriptions",
Width: termW,
Mediatype: structs.TextGemini,
}
go cache.AddPage(&page)
setPage(t, &page)
t.applyBottomBar()
}

func manageSubscriptionQuery(t *tab, u string) {
sub, err := gemini.QueryUnescape(u[27:])
if err != nil {
Error("URL Error", "Invalid query string: "+err.Error())
return
}

err = subscriptions.Remove(sub)
if err != nil {
ManageSubscriptions(t, "about:manage-subscriptions") // Reload
Error("Save Error", "Error saving the unsubscription to disk: "+err.Error())
return
}
ManageSubscriptions(t, "about:manage-subscriptions") // Reload
Info("Unsubscribed from " + sub)
}

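To make the unsubscribe flow above concrete: each listed link embeds the subscription URL, query-escaped, after the 27-character "about:manage-subscriptions?" prefix, and manageSubscriptionQuery reverses that. A minimal sketch using the same go-gemini helpers called above; the feed URL is a made-up example:

feedURL := "gemini://example.com/atom.xml" // hypothetical subscription
link := "about:manage-subscriptions?" + gemini.QueryEscape(feedURL)
// Later, when the link is followed:
sub, err := gemini.QueryUnescape(link[27:])
if err == nil && sub == feedURL {
    // subscriptions.Remove(sub) is what actually unsubscribes
}
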
// openSubscriptionModal displays the "Add subscription" modal.
// It returns whether the user wanted to subscribe to the feed/page.
// The subscribed arg specifies whether this feed/page is already
// subscribed to.
func openSubscriptionModal(validFeed, subscribed bool) bool {
// Reuses yesNoModal

if viper.GetBool("a-general.color") {
yesNoModal.
SetBackgroundColor(config.GetColor("subscription_modal_bg")).
SetTextColor(config.GetColor("subscription_modal_text"))
yesNoModal.GetFrame().
SetBorderColor(config.GetColor("subscription_modal_text")).
SetTitleColor(config.GetColor("subscription_modal_text"))
} else {
yesNoModal.
SetBackgroundColor(tcell.ColorBlack).
SetTextColor(tcell.ColorWhite)
yesNoModal.GetFrame().
SetBorderColor(tcell.ColorWhite).
SetTitleColor(tcell.ColorWhite)
}
if validFeed {
yesNoModal.GetFrame().SetTitle("Feed Subscription")
if subscribed {
yesNoModal.SetText("You are already subscribed to this feed. Would you like to manually update it?")
} else {
yesNoModal.SetText("Would you like to subscribe to this feed?")
}
} else {
yesNoModal.GetFrame().SetTitle("Page Subscription")
if subscribed {
yesNoModal.SetText("You are already subscribed to this page. Would you like to manually update it?")
} else {
yesNoModal.SetText("Would you like to subscribe to this page?")
}
}

tabPages.ShowPage("yesno")
tabPages.SendToFront("yesno")
App.SetFocus(yesNoModal)
App.Draw()

resp := <-yesNoCh
tabPages.SwitchToPage(strconv.Itoa(curTab))
App.SetFocus(tabs[curTab].view)
App.Draw()
return resp
}

// getFeedFromPage is like subscriptions.GetFeed but takes a structs.Page as input.
func getFeedFromPage(p *structs.Page) (*gofeed.Feed, bool) {
parsed, _ := url.Parse(p.URL)
filename := path.Base(parsed.Path)
r := strings.NewReader(p.Raw)
return subscriptions.GetFeed(p.RawMediatype, filename, r)
}

// addFeedDirect is only for adding feeds, not pages.
// It's for when you already have a feed and know if it's tracked.
// Used mainly by handleURL because it already did a lot of the work.
// It returns a bool indicating whether the user actually wanted to
// add the feed or not.
//
// Like addSubscription, it should be called in a goroutine.
func addFeedDirect(u string, feed *gofeed.Feed, tracked bool) bool {
if openSubscriptionModal(true, tracked) {
err := subscriptions.AddFeed(u, feed)
if err != nil {
Error("Feed Error", err.Error())
}
return true
}
return false
}

// addSubscription goes through the process of subscribing to the current page/feed.
// It is the high-level way of doing it. It should be called in a goroutine.
func addSubscription() {
t := tabs[curTab]
p := t.page

if !t.hasContent() {
// It's an about: page, or a malformed one
return
}

feed, isFeed := getFeedFromPage(p)
tracked := subscriptions.IsSubscribed(p.URL)

if openSubscriptionModal(isFeed, tracked) {
var err error

if isFeed {
err = subscriptions.AddFeed(p.URL, feed)
} else {
err = subscriptions.AddPage(p.URL, strings.NewReader(p.Raw))
}

if err != nil {
Error("Feed/Page Error", err.Error())
}
}
}
@ -146,9 +146,8 @@ func (t *tab) pageDown() {
t.view.ScrollTo(row+(termH/4)*3, col)
}

// hasContent returns true when the tab has a page that could be displayed.
// The most likely situation where false would be returned is when the default
// new tab content is being displayed.
// hasContent returns false when the tab's page is malformed,
// has no content or URL, or if it's an 'about:' page.
func (t *tab) hasContent() bool {
if t.page == nil || t.view == nil {
return false
go.mod
@ -12,6 +12,7 @@ require (
github.com/makeworld-the-better-one/progressbar/v3 v3.3.5-0.20200710151429-125743e22b4f
github.com/mitchellh/go-homedir v1.1.0
github.com/mitchellh/mapstructure v1.3.1 // indirect
github.com/mmcdole/gofeed v1.1.0
github.com/pelletier/go-toml v1.8.0 // indirect
github.com/rkoesters/xdg v0.0.0-20181125232953-edd15b846f9b
github.com/spf13/afero v1.2.2 // indirect
@ -26,3 +27,5 @@ require (
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
gopkg.in/ini.v1 v1.57.0 // indirect
)

replace github.com/mmcdole/gofeed => github.com/makeworld-the-better-one/gofeed v1.1.1-0.20201123002655-c0c6354134fe
go.sum
@ -16,8 +16,12 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
|
||||
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
||||
github.com/PuerkitoBio/goquery v1.5.1 h1:PSPBGne8NIUWw+/7vFBV+kG2J/5MOjbzc7154OaKCSE=
|
||||
github.com/PuerkitoBio/goquery v1.5.1/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||
github.com/andybalholm/cascadia v1.1.0 h1:BuuO6sSfQNFRu1LppgbD25Hr2vLYW25JvxHs5zzsLTo=
|
||||
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
|
||||
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
|
||||
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
|
||||
@ -32,6 +36,7 @@ github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc
|
||||
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
||||
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@ -71,6 +76,7 @@ github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.5.0 h1:/QaMHBdZ26BB3SSst0Iwl10Epc+xhTquomWX0oZEB6w=
|
||||
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
@ -106,6 +112,8 @@ github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2p
|
||||
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
|
||||
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
|
||||
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
||||
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
|
||||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
@ -129,6 +137,8 @@ github.com/makeworld-the-better-one/go-gemini v0.9.3 h1:vpJc1u4LYpEI5h7GcOE2zSfO
|
||||
github.com/makeworld-the-better-one/go-gemini v0.9.3/go.mod h1:P7/FbZ+IEIbA/d+A0Y3w2GNgD8SA2AcNv7aDGJbaWG4=
|
||||
github.com/makeworld-the-better-one/go-isemoji v1.1.0 h1:wZBHOKB5zAIgaU2vaWnXFDDhatebB8TySrNVxjVV84g=
|
||||
github.com/makeworld-the-better-one/go-isemoji v1.1.0/go.mod h1:FBjkPl9rr0G4vlZCc+Mr+QcnOfGCTbGWYW8/1sp06I0=
|
||||
github.com/makeworld-the-better-one/gofeed v1.1.1-0.20201123002655-c0c6354134fe h1:i3b9Qy5z23DcXRnrsMYcM5s9Ng5VIidM1xZd+szuTsY=
|
||||
github.com/makeworld-the-better-one/gofeed v1.1.1-0.20201123002655-c0c6354134fe/go.mod h1:QQO3maftbOu+hiVOGOZDRLymqGQCos4zxbA4j89gMrE=
|
||||
github.com/makeworld-the-better-one/progressbar/v3 v3.3.5-0.20200710151429-125743e22b4f h1:YEUlTs5gb35UlBLTgqrub9axWTYB3d7/8TxrkJDZpRI=
|
||||
github.com/makeworld-the-better-one/progressbar/v3 v3.3.5-0.20200710151429-125743e22b4f/go.mod h1:X6sxWNi9PBgQybpR4fpXPVD5fm7svLqZTQ5DJuERIoM=
|
||||
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
|
||||
@ -153,7 +163,13 @@ github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:F
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.3.1 h1:cCBH2gTD2K0OtLlv/Y5H01VQCqmlDxz30kS5Y5bqfLA=
|
||||
github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf h1:sWGE2v+hO0Nd4yFU/S/mDBM5plIU8v/Qhfz41hkDIAI=
|
||||
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf/go.mod h1:pasqhqstspkosTneA62Nc+2p9SOBBYAPbnmRRWPQ0V8=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
|
||||
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
@ -181,8 +197,10 @@ github.com/rkoesters/xdg v0.0.0-20181125232953-edd15b846f9b h1:8NiY6v9/IlFU8osj1
|
||||
github.com/rkoesters/xdg v0.0.0-20181125232953-edd15b846f9b/go.mod h1:T1HolqzmdHnJIH6p7A9LDuvYGQgEHx9ijX3vKgDKU60=
|
||||
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
||||
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
@ -214,6 +232,7 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
||||
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
|
||||
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
||||
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
|
||||
github.com/urfave/cli v1.22.3/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
|
||||
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
|
||||
gitlab.com/tslocum/cbind v0.1.1 h1:JXXtxMWHgWLvoF+QkrvcNvOQ59juy7OE1RhT7hZfdt0=
|
||||
gitlab.com/tslocum/cbind v0.1.1/go.mod h1:rX7vkl0pUSg/yy427MmD1FZAf99S7WwpUlxF/qTpPqk=
|
||||
@ -248,6 +267,7 @@ golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU
|
||||
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
|
||||
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
||||
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
|
||||
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@ -261,7 +281,11 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn
|
||||
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a h1:GuSPYbZzB5/dcLNCwLQLsg3obCJtX9IJhpXkvY7kzk0=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
@ -349,6 +373,7 @@ gopkg.in/ini.v1 v1.57.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
||||
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
|
@ -102,31 +102,34 @@ func MakePage(url string, res *gemini.Response, width, leftMargin int, proxied b
if mediatype == "text/gemini" {
rendered, links := RenderGemini(utfText, width, leftMargin, proxied)
return &structs.Page{
Mediatype: structs.TextGemini,
URL: url,
Raw: utfText,
Content: rendered,
Links: links,
Mediatype: structs.TextGemini,
RawMediatype: mediatype,
URL: url,
Raw: utfText,
Content: rendered,
Links: links,
}, nil
} else if strings.HasPrefix(mediatype, "text/") {
if mediatype == "text/x-ansi" || strings.HasSuffix(url, ".ans") || strings.HasSuffix(url, ".ansi") {
// ANSI
return &structs.Page{
Mediatype: structs.TextAnsi,
URL: url,
Raw: utfText,
Content: RenderANSI(utfText, leftMargin),
Links: []string{},
Mediatype: structs.TextAnsi,
RawMediatype: mediatype,
URL: url,
Raw: utfText,
Content: RenderANSI(utfText, leftMargin),
Links: []string{},
}, nil
}

// Treated as plaintext
return &structs.Page{
Mediatype: structs.TextPlain,
URL: url,
Raw: utfText,
Content: RenderPlainText(utfText, leftMargin),
Links: []string{},
Mediatype: structs.TextPlain,
RawMediatype: mediatype,
URL: url,
Raw: utfText,
Content: RenderPlainText(utfText, leftMargin),
Links: []string{},
}, nil
}

@ -18,18 +18,19 @@ const (

// Page is for storing UTF-8 text/gemini pages, as well as text/plain pages.
type Page struct {
URL string
Mediatype Mediatype
Raw string // The raw response, as received over the network
Content string // The processed content, NOT raw. Uses cview color tags. It will also have a left margin.
Links []string // URLs, for each region in the content.
Row int // Scroll position
Column int // ditto
Width int // The terminal width when the Content was set, to know when reformatting should happen.
Selected string // The current text or link selected
SelectedID string // The cview region ID for the selected text/link
Mode PageMode
Favicon string
URL string
Mediatype Mediatype // Used for rendering purposes, generalized
RawMediatype string // The actual mediatype sent by the server
Raw string // The raw response, as received over the network
Content string // The processed content, NOT raw. Uses cview color tags. It will also have a left margin.
Links []string // URLs, for each region in the content.
Row int // Scroll position
Column int // ditto
Width int // The terminal width when the Content was set, to know when reformatting should happen.
Selected string // The current text or link selected
SelectedID string // The cview region ID for the selected text/link
Mode PageMode
Favicon string
}

// Size returns an approx. size of a Page in bytes.

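The new RawMediatype field lets code outside the renderer see the exact mediatype the server sent, while Mediatype stays a generalized rendering hint. A minimal sketch of the intended use, mirroring getFeedFromPage in display/subscriptions.go above (p is a hypothetical *structs.Page; GetFeed checks both the raw mediatype and the filename):

parsed, _ := url.Parse(p.URL)
filename := path.Base(parsed.Path)
// p.Mediatype only says how the page is rendered; p.RawMediatype and the
// filename are what feed detection looks at.
feed, isFeed := subscriptions.GetFeed(p.RawMediatype, filename, strings.NewReader(p.Raw))
_, _ = feed, isFeed
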
subscriptions/entries.go (new file)
@ -0,0 +1,144 @@
package subscriptions

import (
"net/url"
"sort"
"strings"
"time"
)

// This file contains funcs for creating PageEntries, which
// are consumed by display/subscriptions.go

// getURL returns a URL to be used in a PageEntry, from a
// list of URLs for that item. It prefers gemini URLs, then
// HTTP(S), then by order.
func getURL(urls []string) string {
if len(urls) == 0 {
return ""
}

var firstHTTP string
for _, u := range urls {
if strings.HasPrefix(u, "gemini://") {
return u
}
if (strings.HasPrefix(u, "http://") || strings.HasPrefix(u, "https://")) && firstHTTP == "" {
// First HTTP(S) URL in the list
firstHTTP = u
}
}
if firstHTTP != "" {
return firstHTTP
}
return urls[0]
}

// GetPageEntries returns the current list of PageEntries
// for use in rendering a page.
// The contents of the returned entries will never change,
// so this function needs to be called again to get updates.
// It always returns sorted entries - by post time, from newest to oldest.
func GetPageEntries() *PageEntries {
var pe PageEntries

data.RLock()

for _, feed := range data.Feeds {
for _, item := range feed.Items {
if item.Links == nil || len(item.Links) == 0 {
// Ignore items without links
continue
}

// Set pub

var pub time.Time

// Try to use updated time first, then published

if item.UpdatedParsed != nil && !item.UpdatedParsed.IsZero() {
pub = *item.UpdatedParsed
} else if item.PublishedParsed != nil && !item.PublishedParsed.IsZero() {
pub = *item.PublishedParsed
} else {
// No time on the post, use now
pub = time.Now()
}

// Set prefix

// Prefer using the feed title over anything else.
// Many feeds in Gemini only have this due to gemfeed's default settings.
prefix := feed.Title

if prefix == "" {
// feed.Title was empty

if item.Author != nil {
// Prefer using the item author over the feed author
prefix = item.Author.Name
} else {
if feed.Author != nil {
prefix = feed.Author.Name
} else {
prefix = "[author unknown]"
}
}
} else {
// There's already a title, so add the author (if exists) to
// the end of the title in parentheses.
// Don't add the author if it's the same as the title.

if item.Author != nil && item.Author.Name != prefix {
// Prefer using the item author over the feed author
prefix += " (" + item.Author.Name + ")"
} else if feed.Author != nil && feed.Author.Name != prefix {
prefix += " (" + feed.Author.Name + ")"
}
}

pe.Entries = append(pe.Entries, &PageEntry{
Prefix: prefix,
Title: item.Title,
URL: getURL(item.Links),
Published: pub,
})
}
}

for u, page := range data.Pages {
parsed, _ := url.Parse(u)

// Path is title
title := parsed.Path
if strings.HasPrefix(title, "/~") {
// A user dir
title = title[2:] // Remove beginning slash and tilde
// Remove trailing slash if the root of a user dir is being tracked
if strings.Count(title, "/") <= 1 && title[len(title)-1] == '/' {
title = title[:len(title)-1]
}
} else if strings.HasPrefix(title, "/users/") {
// "/users/" is removed for aesthetics when tracking hosted users
title = strings.TrimPrefix(title, "/users/")
title = strings.TrimPrefix(title, "~") // Remove leading tilde
// Remove trailing slash if the root of a user dir is being tracked
if strings.Count(title, "/") <= 1 && title[len(title)-1] == '/' {
title = title[:len(title)-1]
}
}

pe.Entries = append(pe.Entries, &PageEntry{
Prefix: parsed.Host,
Title: title,
URL: u,
Published: page.Changed,
})
}

data.RUnlock()

sort.Sort(&pe)
return &pe
}
subscriptions/structs.go (new file)
@ -0,0 +1,108 @@
package subscriptions

import (
"sync"
"time"

"github.com/mmcdole/gofeed"
)

/*
Example stored JSON.

{
"feeds": {
"url1": <gofeed.Feed>,
"url2": <gofeed.Feed>,
},
"pages": {
"url1": {
"hash": <hash>,
"changed": <time>
},
"url2": {
"hash": <hash>,
"changed": <time>
}
}
}

"pages" are the pages tracked for changes that aren't feeds.
The hash used is SHA-256.
The time is in RFC 3339 format, preferably in the UTC timezone.
*/

// Decoded JSON
type jsonData struct {
feedMu *sync.RWMutex
pageMu *sync.RWMutex
Feeds map[string]*gofeed.Feed `json:"feeds,omitempty"`
Pages map[string]*pageJSON `json:"pages,omitempty"`
}

// Lock locks both feed and page mutexes.
func (j *jsonData) Lock() {
j.feedMu.Lock()
j.pageMu.Lock()
}

// Unlock unlocks both feed and page mutexes.
func (j *jsonData) Unlock() {
j.feedMu.Unlock()
j.pageMu.Unlock()
}

// RLock read-locks both feed and page mutexes.
func (j *jsonData) RLock() {
j.feedMu.RLock()
j.pageMu.RLock()
}

// RUnlock read-unlocks both feed and page mutexes.
func (j *jsonData) RUnlock() {
j.feedMu.RUnlock()
j.pageMu.RUnlock()
}

type pageJSON struct {
Hash string `json:"hash"`
Changed time.Time `json:"changed"` // When the latest change happened
}

// Global instance of jsonData - loaded from JSON and used
var data = jsonData{
feedMu: &sync.RWMutex{},
pageMu: &sync.RWMutex{},
// Maps are created in Init()
}

// PageEntry is a single item on a subscriptions page.
// It is used for both feeds and pages.
type PageEntry struct {
Prefix string // Feed/log title, author, etc - something before the post title
Title string
URL string
Published time.Time
}

// PageEntries is a new-to-old list of PageEntry structs, used to create a
// subscriptions page.
// It should always be assumed to be sorted when used in other packages,
// by post time, from newest to oldest.
type PageEntries struct {
Entries []*PageEntry
}

// Implement sort.Interface

func (e *PageEntries) Len() int {
return len(e.Entries)
}

func (e *PageEntries) Less(i, j int) bool {
return e.Entries[i].Published.After(e.Entries[j].Published)
}

func (e *PageEntries) Swap(i, j int) {
e.Entries[i], e.Entries[j] = e.Entries[j], e.Entries[i]
}
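For concreteness, a hypothetical subscriptions.json matching the schema documented at the top of this file — the URLs, hash, and timestamp are invented, and the stored feed object is simply gofeed.Feed's own JSON form, trimmed here:

{
  "feeds": {
    "gemini://example.com/atom.xml": {"title": "Example Gemlog", "items": []}
  },
  "pages": {
    "gemini://example.org/~user/": {
      "hash": "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08",
      "changed": "2020-11-23T00:26:55Z"
    }
  }
}
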
subscriptions/subscriptions.go (new file)
@ -0,0 +1,381 @@
package subscriptions

import (
"crypto/sha256"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"mime"
"os"
"path"
"reflect"
"strings"
"sync"
"time"

"github.com/makeworld-the-better-one/amfora/client"
"github.com/makeworld-the-better-one/amfora/config"
"github.com/makeworld-the-better-one/go-gemini"
"github.com/mmcdole/gofeed"
"github.com/spf13/viper"
)

// TODO: Test for deadlocks and whether there should be more
// goroutines for file writing or other things.

var (
ErrSaving = errors.New("couldn't save JSON to disk")
ErrNotSuccess = errors.New("status 20 not returned")
ErrNotFeed = errors.New("not a valid feed")
)

var writeMu = sync.Mutex{} // Prevent concurrent writes to subscriptions.json file

// LastUpdated is the time when the in-memory data was last updated.
// It can be used to know if the subscriptions page should be regenerated.
var LastUpdated time.Time

// Init should be called after config.Init.
func Init() error {
f, err := os.Open(config.SubscriptionPath)
if err == nil {
// File exists and could be opened

fi, err := f.Stat()
if err == nil && fi.Size() > 0 {
// File is not empty

jsonBytes, err := ioutil.ReadAll(f)
f.Close()
if err != nil {
return fmt.Errorf("read subscriptions.json error: %w", err)
}
err = json.Unmarshal(jsonBytes, &data)
if err != nil {
return fmt.Errorf("subscriptions.json is corrupted: %w", err)
}
}
f.Close()
} else if !os.IsNotExist(err) {
// There's an error opening the file, but it's not because it doesn't exist
return fmt.Errorf("open subscriptions.json error: %w", err)
} else {
// File does not exist, initialize maps
data.Feeds = make(map[string]*gofeed.Feed)
data.Pages = make(map[string]*pageJSON)
}

LastUpdated = time.Now()

if viper.GetInt("subscriptions.update_interval") > 0 {
// Update subscriptions every so often
go func() {
for {
updateAll()
time.Sleep(time.Duration(viper.GetInt("subscriptions.update_interval")) * time.Second)
}
}()
} else {
// User disabled automatic updates
// So just update once at the beginning
go updateAll()
}

return nil
}

// IsSubscribed returns true if the URL is already subscribed to,
// whether a feed or page.
func IsSubscribed(url string) bool {
data.feedMu.RLock()
for u := range data.Feeds {
if url == u {
data.feedMu.RUnlock()
return true
}
}
data.feedMu.RUnlock()
data.pageMu.RLock()
for u := range data.Pages {
if url == u {
data.pageMu.RUnlock()
return true
}
}
data.pageMu.RUnlock()
return false
}

// GetFeed returns a Feed object and a bool indicating whether the passed
// content was actually recognized as a feed.
func GetFeed(mediatype, filename string, r io.Reader) (*gofeed.Feed, bool) {
if r == nil {
return nil, false
}

// Check mediatype and filename
if mediatype != "application/atom+xml" && mediatype != "application/rss+xml" && mediatype != "application/json+feed" &&
filename != "atom.xml" && filename != "feed.xml" && filename != "feed.json" &&
!strings.HasSuffix(filename, ".atom") && !strings.HasSuffix(filename, ".rss") {
// No part of the above is true
return nil, false
}
feed, err := gofeed.NewParser().Parse(r)
if feed == nil {
return nil, false
}
return feed, err == nil
}

func writeJSON() error {
writeMu.Lock()
defer writeMu.Unlock()

data.Lock()
jsonBytes, err := json.MarshalIndent(&data, "", " ")
data.Unlock()
if err != nil {
return err
}
err = ioutil.WriteFile(config.SubscriptionPath, jsonBytes, 0666)
if err != nil {
return err
}

return nil
}

// AddFeed stores a feed.
// It can be used to update a feed for a URL, although the package
// will handle that on its own.
func AddFeed(url string, feed *gofeed.Feed) error {
if feed == nil {
panic("feed is nil")
}

// Remove any unused fields to save memory and disk space
feed.Image = nil
feed.Generator = ""
feed.Categories = nil
feed.DublinCoreExt = nil
feed.ITunesExt = nil
feed.Custom = nil
feed.Link = ""
feed.Links = nil
for _, item := range feed.Items {
item.Description = ""
item.Content = ""
item.Image = nil
item.Categories = nil
item.Enclosures = nil
item.DublinCoreExt = nil
item.ITunesExt = nil
item.Extensions = nil
item.Custom = nil
item.Link = "" // Links is used instead
}

data.feedMu.Lock()
oldFeed, ok := data.Feeds[url]
if !ok || !reflect.DeepEqual(feed, oldFeed) {
// Feeds are different, or there was never an old one

LastUpdated = time.Now()
data.Feeds[url] = feed
data.feedMu.Unlock()
err := writeJSON()
if err != nil {
return ErrSaving
}
} else {
data.feedMu.Unlock()
}
return nil
}

// AddPage stores a page to track for changes.
// It can be used to update the page as well, although the package
// will handle that on its own.
func AddPage(url string, r io.Reader) error {
if r == nil {
return nil
}

h := sha256.New()
if _, err := io.Copy(h, r); err != nil {
return err
}
newHash := fmt.Sprintf("%x", h.Sum(nil))

data.pageMu.Lock()
_, ok := data.Pages[url]
if !ok || data.Pages[url].Hash != newHash {
// Page content is different, or it didn't exist

LastUpdated = time.Now()
data.Pages[url] = &pageJSON{
Hash: newHash,
Changed: time.Now().UTC(),
}

data.pageMu.Unlock()
err := writeJSON()
if err != nil {
return ErrSaving
}
} else {
data.pageMu.Unlock()
}

return nil
}

func updateFeed(url string) error {
res, err := client.Fetch(url)
if err != nil {
if res != nil {
res.Body.Close()
}
return err
}
defer res.Body.Close()

if res.Status != gemini.StatusSuccess {
return ErrNotSuccess
}
mediatype, _, err := mime.ParseMediaType(res.Meta)
if err != nil {
return err
}
filename := path.Base(url)
feed, ok := GetFeed(mediatype, filename, res.Body)
if !ok {
return ErrNotFeed
}
return AddFeed(url, feed)
}

func updatePage(url string) error {
res, err := client.Fetch(url)
if err != nil {
if res != nil {
res.Body.Close()
}
return err
}
defer res.Body.Close()

if res.Status != gemini.StatusSuccess {
return ErrNotSuccess
}

return AddPage(url, res.Body)
}

// updateAll updates all subscriptions using workers.
// It only returns once all the workers are done.
func updateAll() {
// TODO: Is two goroutines the right amount?

worker := func(jobs <-chan [2]string, wg *sync.WaitGroup) {
// Each job is: [2]string{<type>, "url"}
// where <type> is "feed" or "page"

defer wg.Done()
for j := range jobs {
if j[0] == "feed" {
updateFeed(j[1]) //nolint:errcheck
} else if j[0] == "page" {
updatePage(j[1]) //nolint:errcheck
}
}
}

var wg sync.WaitGroup

data.RLock()
numJobs := len(data.Feeds) + len(data.Pages)
jobs := make(chan [2]string, numJobs)

if numJobs == 0 {
data.RUnlock()
return
}

numWorkers := viper.GetInt("subscriptions.workers")
if numWorkers < 1 {
numWorkers = 1
}

// Start workers, waiting for jobs
for w := 0; w < numWorkers; w++ {
wg.Add(1)
go func() {
worker(jobs, &wg)
}()
}

// Get map keys in a slice

feedKeys := make([]string, len(data.Feeds))
i := 0
for k := range data.Feeds {
feedKeys[i] = k
i++
}

pageKeys := make([]string, len(data.Pages))
i = 0
for k := range data.Pages {
pageKeys[i] = k
i++
}
data.RUnlock()

for j := 0; j < numJobs; j++ {
if j < len(feedKeys) {
jobs <- [2]string{"feed", feedKeys[j]}
} else {
// In the Pages
jobs <- [2]string{"page", pageKeys[j-len(feedKeys)]}
}
}
close(jobs)

wg.Wait()
}

// AllURLS returns all the subscribed-to URLs.
func AllURLS() []string {
data.RLock()
defer data.RUnlock()

urls := make([]string, len(data.Feeds)+len(data.Pages))
i := 0
for k := range data.Feeds {
urls[i] = k
i++
}
for k := range data.Pages {
urls[i] = k
i++
}

return urls
}

// Remove removes a subscription from memory and from the disk.
// The URL must be provided. It will do nothing if the URL is
// not an actual subscription.
//
// It returns any errors that occurred when saving to disk.
func Remove(u string) error {
data.Lock()
// Just delete from both instead of using a loop to find it
delete(data.Feeds, u)
delete(data.Pages, u)
data.Unlock()
return writeJSON()
}
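Since Init reads config.SubscriptionPath and the subscriptions.* settings through viper, it has to run after the config package is loaded, as the doc comment above says. A sketch of the presumed startup wiring (not the actual main.go of this commit, and assuming config.Init also reports an error):

if err := config.Init(); err != nil {
    // handle the error and exit
}
if err := subscriptions.Init(); err != nil {
    // a corrupted or unreadable subscriptions.json ends up here
}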