Mirror of https://github.com/makew0rld/amfora.git (synced 2024-12-04 14:46:29 -05:00)
🎨 Rename page cache funcs
This commit is contained in:
parent f3b7437046
commit edd128e7c5
cache/cache.go (vendored), 26 lines changed

@@ -42,12 +42,12 @@ func removeUrl(url string) {
 	}
 }
 
-// Add adds a page to the cache, removing earlier pages as needed
+// AddPage adds a page to the cache, removing earlier pages as needed
 // to keep the cache inside its limits.
 //
 // If your page is larger than the max cache size, the provided page
 // will silently not be added to the cache.
-func Add(p *structs.Page) {
+func AddPage(p *structs.Page) {
 	if p.Url == "" || strings.HasPrefix(p.Url, "about:") {
 		// Just in case, these pages shouldn't be cached
 		return
@@ -62,11 +62,11 @@ func Add(p *structs.Page) {
 	// There should only ever be 1 page to remove at most,
 	// but this handles more just in case.
 	for NumPages() >= maxPages && maxPages > 0 {
-		Remove(urls[0])
+		RemovePage(urls[0])
 	}
 	// Do the same but for cache size
-	for Size()+p.Size() > maxSize && maxSize > 0 {
-		Remove(urls[0])
+	for SizePages()+p.Size() > maxSize && maxSize > 0 {
+		RemovePage(urls[0])
 	}
 
 	lock.Lock()
@@ -77,25 +77,25 @@ func Add(p *structs.Page) {
 	urls = append(urls, p.Url)
 }
 
-// Remove will remove a page from the cache.
+// RemovePage will remove a page from the cache.
 // Even if the page doesn't exist there will be no error.
-func Remove(url string) {
+func RemovePage(url string) {
 	lock.Lock()
 	defer lock.Unlock()
 	delete(pages, url)
 	removeUrl(url)
 }
 
-// Clear removes all pages from the cache.
-func Clear() {
+// ClearPages removes all pages from the cache.
+func ClearPages() {
 	lock.Lock()
 	defer lock.Unlock()
 	pages = make(map[string]*structs.Page)
 	urls = make([]string, 0)
 }
 
-// Size returns the approx. current size of the cache in bytes.
-func Size() int {
+// SizePages returns the approx. current size of the cache in bytes.
+func SizePages() int {
 	lock.RLock()
 	defer lock.RUnlock()
 	n := 0
@@ -111,9 +111,9 @@ func NumPages() int {
 	return len(pages)
 }
 
-// Get returns the page struct, and a bool indicating if the page was in the cache or not.
+// GetPage returns the page struct, and a bool indicating if the page was in the cache or not.
 // An empty page struct is returned if the page isn't in the cache.
-func Get(url string) (*structs.Page, bool) {
+func GetPage(url string) (*structs.Page, bool) {
 	lock.RLock()
 	defer lock.RUnlock()
 	p, ok := pages[url]
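
For reference, here is a minimal, purely illustrative sketch of the renamed cache API from a caller's point of view. This is not code from the commit; the import paths are an assumption based on the mirror URL above, and the example URL is made up. structs.Page with a Url field is taken from the tests below.

package main

import (
	"fmt"

	"github.com/makew0rld/amfora/cache"   // import path assumed from the mirror URL above
	"github.com/makew0rld/amfora/structs" // structs.Page as used in cache_test.go below
)

func main() {
	p := &structs.Page{Url: "gemini://example.com/"}

	cache.AddPage(p) // formerly cache.Add
	if page, ok := cache.GetPage(p.Url); ok { // formerly cache.Get
		fmt.Println("cached:", page.Url)
	}
	fmt.Println(cache.NumPages(), "pages,", cache.SizePages(), "bytes") // SizePages was Size
	cache.RemovePage(p.Url) // formerly cache.Remove
	cache.ClearPages()      // formerly cache.Clear
}
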
cache/cache_test.go (vendored), 28 lines changed

@@ -12,7 +12,7 @@ var p2 = structs.Page{Url: "example.org"}
 var queryPage = structs.Page{Url: "gemini://example.com/test?query"}
 
 func reset() {
-	Clear()
+	ClearPages()
 	SetMaxPages(0)
 	SetMaxSize(0)
 }
@@ -20,8 +20,8 @@ func reset() {
 func TestMaxPages(t *testing.T) {
 	reset()
 	SetMaxPages(1)
-	Add(&p)
-	Add(&p2)
+	AddPage(&p)
+	AddPage(&p2)
 	assert.Equal(t, 1, NumPages(), "there should only be one page")
 }
 
@@ -29,24 +29,24 @@ func TestMaxSize(t *testing.T) {
 	reset()
 	assert := assert.New(t)
 	SetMaxSize(p.Size())
-	Add(&p)
+	AddPage(&p)
 	assert.Equal(1, NumPages(), "one page should be added")
-	Add(&p2)
+	AddPage(&p2)
 	assert.Equal(1, NumPages(), "there should still be just one page due to cache size limits")
 	assert.Equal(p2.Url, urls[0], "the only page url should be the second page one")
 }
 
 func TestRemove(t *testing.T) {
 	reset()
-	Add(&p)
-	Remove(p.Url)
+	AddPage(&p)
+	RemovePage(p.Url)
 	assert.Equal(t, 0, NumPages(), "there shouldn't be any pages after the removal")
 }
 
 func TestClearAndNumPages(t *testing.T) {
 	reset()
-	Add(&p)
-	Clear()
+	AddPage(&p)
+	ClearPages()
 	assert.Equal(t, 0, len(pages), "map should be empty")
 	assert.Equal(t, 0, len(urls), "urls slice shoulde be empty")
 	assert.Equal(t, 0, NumPages(), "NumPages should report empty too")
@@ -54,15 +54,15 @@ func TestClearAndNumPages(t *testing.T) {
 
 func TestSize(t *testing.T) {
 	reset()
-	Add(&p)
-	assert.Equal(t, p.Size(), Size(), "sizes should match")
+	AddPage(&p)
+	assert.Equal(t, p.Size(), SizePages(), "sizes should match")
 }
 
 func TestGet(t *testing.T) {
 	reset()
-	Add(&p)
-	Add(&p2)
-	page, ok := Get(p.Url)
+	AddPage(&p)
+	AddPage(&p2)
+	page, ok := GetPage(p.Url)
 	if !ok {
 		t.Fatal("Get should say that the page was found")
 	}
cache/redir.go (vendored), 4 lines changed

@@ -1,8 +1,6 @@
 package cache
 
-import (
-	"sync"
-)
+import "sync"
 
 // Functions for caching redirects.
 
@@ -9,6 +9,7 @@
 # example.com:123
 
 [a-general]
+# Press Ctrl-H to access it
 home = "gemini://gemini.circumlunar.space"
 
 # What command to run to open a HTTP URL. Set to "default" to try to guess the browser,
@@ -16,21 +17,36 @@ home = "gemini://gemini.circumlunar.space"
 # If a command is set, than the URL will be added (in quotes) to the end of the command.
 # A space will be prepended if necessary.
 http = "default"
-search = "gemini://gus.guru/search" # Any URL that will accept a query string can be put here
-color = true # Whether colors will be used in the terminal
-bullets = true # Whether to replace list asterisks with unicode bullets
+
+# Any URL that will accept a query string can be put here
+search = "gemini://gus.guru/search"
+
+# Whether colors will be used in the terminal
+color = true
+
+# Whether to replace list asterisks with unicode bullets
+bullets = true
+
 # A number from 0 to 1, indicating what percentage of the terminal width the left margin should take up.
 left_margin = 0.15
-max_width = 100 # The max number of columns to wrap a page's text to. Preformatted blocks are not wrapped.
+
+# The max number of columns to wrap a page's text to. Preformatted blocks are not wrapped.
+max_width = 100
+
 # 'downloads' is the path to a downloads folder.
 # An empty value means the code will find the default downloads folder for your system.
 # If the path does not exist it will be created.
 downloads = ""
 
 # Max size for displayable content in bytes - after that size a download window pops up
 page_max_size = 2097152 # 2 MiB
 # Max time it takes to load a page in seconds - after that a download window pops up
 page_max_time = 10
+
+# Whether to replace tab numbers with emoji favicons, which are cached.
+emoji_favicons = false
+
+
 # Options for page cache - which is only for text/gemini pages
 # Increase the cache size to speed up browsing at the expense of memory
 [cache]
@@ -38,15 +54,16 @@ page_max_time = 10
 max_size = 0 # Size in bytes
 max_pages = 30 # The maximum number of pages the cache will store
 
 
 [theme]
 # This section is for changing the COLORS used in Amfora.
-# These colors only apply if color is enabled above.
-# Colors can be set using a W3C color name, or a hex value such as #ffffff".
+# These colors only apply if 'color' is enabled above.
+# Colors can be set using a W3C color name, or a hex value such as "#ffffff".
+
 # Note that not all colors will work on terminals that do not have truecolor support.
 # If you want to stick to the standard 16 or 256 colors, you can get
 # a list of those here: https://jonasjacek.github.io/colors/
-# Do NOT use the names from that site, just the hex codes.
+# DO NOT use the names from that site, just the hex codes.
 
 # Definitions:
 # bg = background
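
The settings in this section are read elsewhere with viper, using section-prefixed keys such as "a-general.search" (see the Init() hunk below). A rough, illustrative sketch of how the options touched here could be read, assuming that key scheme and that the config file has already been loaded; only "a-general.search" is confirmed by this diff, the other key names are assumptions from the [a-general] section above:

package main

import (
	"fmt"

	"github.com/spf13/viper" // the config library used in the Init() hunk below
)

func main() {
	// Values are only meaningful after the config above has been loaded;
	// key names other than "a-general.search" are assumptions.
	fmt.Println(viper.GetString("a-general.search"))       // e.g. "gemini://gus.guru/search"
	fmt.Println(viper.GetBool("a-general.emoji_favicons")) // the new option added above
	fmt.Println(viper.GetInt("a-general.page_max_size"))   // e.g. 2097152 (2 MiB)
}
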
@@ -180,11 +180,11 @@ func Init() {
 		// Detect if it's a search or URL
 		if strings.Contains(query, " ") || (!strings.Contains(query, "//") && !strings.Contains(query, ".") && !strings.HasPrefix(query, "about:")) {
 			u := viper.GetString("a-general.search") + "?" + queryEscape(query)
-			cache.Remove(u) // Don't use the cached version of the search
+			cache.RemovePage(u) // Don't use the cached version of the search
 			URL(u)
 		} else {
 			// Full URL
-			cache.Remove(query) // Don't use cached version for manually entered URL
+			cache.RemovePage(query) // Don't use cached version for manually entered URL
 			URL(query)
 		}
 		return
@@ -550,7 +550,7 @@ func Reload() {
 		return
 	}
 
-	go cache.Remove(tabs[curTab].page.Url)
+	go cache.RemovePage(tabs[curTab].page.Url)
 	go func(t *tab) {
 		handleURL(t, t.page.Url) // goURL is not used bc history shouldn't be added to
 		if t == tabs[curTab] {
@@ -221,7 +221,7 @@ func handleURL(t *tab, u string) (string, bool) {
 	// Gemini URL
 
 	// Load page from cache if possible
-	page, ok := cache.Get(u)
+	page, ok := cache.GetPage(u)
 	if ok {
 		setPage(t, page)
 		return ret(u, true)
@@ -285,7 +285,7 @@ func handleURL(t *tab, u string) (string, bool) {
 	}
 
 	page.Width = termW
-	go cache.Add(page)
+	go cache.AddPage(page)
 	setPage(t, page)
 	return ret(u, true)
 }
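
Taken together, the handleURL hunks above show the check-cache-then-fetch flow now expressed with the renamed functions. A condensed, illustrative sketch of that flow under assumed import paths, with fetchPage as a hypothetical placeholder for the real Gemini request code (it is not part of this commit):

package main

import (
	"github.com/makew0rld/amfora/cache"   // import path assumed from the mirror URL above
	"github.com/makew0rld/amfora/structs"
)

// fetchPage is a hypothetical stand-in for the real network fetch.
func fetchPage(u string) (*structs.Page, error) {
	return &structs.Page{Url: u}, nil
}

// loadPage condenses the cache logic from the handleURL hunks above: try the
// cache first, otherwise fetch the page and add it to the cache asynchronously.
func loadPage(u string) (*structs.Page, error) {
	if page, ok := cache.GetPage(u); ok {
		return page, nil // cache hit
	}
	page, err := fetchPage(u)
	if err != nil {
		return nil, err
	}
	go cache.AddPage(page) // cached in a goroutine, as in the diff above
	return page, nil
}

func main() {
	page, _ := loadPage("gemini://example.com/")
	_ = page
}
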