5 Commits

Author SHA1 Message Date
Wes Koop
74e6ee8e8e Do not add filter for root folder, allowing ALL feeds to be marked as read.
Reeder Fever behaviour is to send an id=0 when you mark all items as read
2026-04-27 22:01:12 +01:00
nkanaev
167aef9ba1 remove feed_sizes 2026-04-27 21:51:12 +01:00
nkanaev
ed726f26f4 change DeleteOldItems logic 2026-04-27 21:41:56 +01:00
nkanaev
760f611007 add item.last_arrived field 2026-04-27 21:05:25 +01:00
nkanaev
49c704037b cmd: modernize -fix ./cmd/... 2026-04-27 20:44:24 +01:00
18 changed files with 202 additions and 198 deletions

View File

@@ -51,7 +51,7 @@ func Template(path string) *template.Template {
return tmpl
}
func Render(path string, writer io.Writer, data interface{}) {
func Render(path string, writer io.Writer, data any) {
tmpl := Template(path)
tmpl.Execute(writer, data)
}

View File

@@ -9,6 +9,7 @@ import (
"fmt"
"io"
"regexp"
"slices"
"strconv"
"strings"
@@ -225,10 +226,8 @@ func hasRequiredAttributes(tagName string, attributes []string) bool {
for element, attrs := range elements {
if tagName == element {
for _, attribute := range attributes {
for _, attr := range attrs {
if attr == attribute {
return true
}
if slices.Contains(attrs, attribute) {
return true
}
}
@@ -285,13 +284,7 @@ func isValidIframeSource(baseURL, src string) bool {
return true
}
for _, safeDomain := range whitelist {
if safeDomain == domain {
return true
}
}
return false
return slices.Contains(whitelist, domain)
}
func getTagAllowList() map[string][]string {
@@ -355,13 +348,7 @@ func getTagAllowList() map[string][]string {
}
func inList(needle string, haystack []string) bool {
for _, element := range haystack {
if element == needle {
return true
}
}
return false
return slices.Contains(haystack, needle)
}
func isBlockedTag(tagName string) bool {
@@ -371,13 +358,7 @@ func isBlockedTag(tagName string) bool {
"style",
}
for _, element := range blacklist {
if element == tagName {
return true
}
}
return false
return slices.Contains(blacklist, tagName)
}
/*

View File

@@ -2,6 +2,7 @@ package scraper
import (
"net/url"
"slices"
"strings"
"github.com/nkanaev/yarr/src/content/htmlutil"
@@ -22,10 +23,8 @@ func FindFeeds(body string, base string) map[string]string {
isFeedLink := func(n *html.Node) bool {
if n.Type == html.ElementNode && n.Data == "link" {
t := htmlutil.Attr(n, "type")
for _, tt := range linkTypes {
if tt == t {
return true
}
if slices.Contains(linkTypes, t) {
return true
}
}
return false

View File

@@ -216,7 +216,7 @@ func TestRSSTitleHTMLTags(t *testing.T) {
`))
have := []string{feed.Items[0].Title, feed.Items[1].Title}
want := []string{"title in p", "very strong title"}
for i := 0; i < len(want); i++ {
for i := range want {
if want[i] != have[i] {
t.Errorf("title doesn't match\nwant: %#v\nhave: %#v\n", want[i], have[i])
}
@@ -241,7 +241,7 @@ func TestRSSIsPermalink(t *testing.T) {
URL: "http://example.com/posts/1",
},
}
for i := 0; i < len(want); i++ {
for i := range want {
if !reflect.DeepEqual(want, have) {
t.Errorf("Failed to handle isPermalink\nwant: %#v\nhave: %#v\n", want[i], have[i])
}

View File

@@ -46,7 +46,7 @@ func TestSafeXMLReaderPartial1(t *testing.T) {
f = NewSafeXMLReader(f)
buf := make([]byte, 1)
for i := 0; i < len(want); i++ {
for i := range want {
n, err := f.Read(buf)
if err != nil {
t.Fatal(err)

View File

@@ -44,7 +44,7 @@ func (m *Middleware) Handler(c *router.Context) {
c.Redirect(rootUrl)
return
} else {
c.HTML(http.StatusOK, assets.Template("login.html"), map[string]interface{}{
c.HTML(http.StatusOK, assets.Template("login.html"), map[string]any{
"username": username,
"error": "Invalid username/password",
"settings": m.DB.GetSettings(),
@@ -52,7 +52,7 @@ func (m *Middleware) Handler(c *router.Context) {
return
}
}
c.HTML(http.StatusOK, assets.Template("login.html"), map[string]interface{}{
c.HTML(http.StatusOK, assets.Template("login.html"), map[string]any{
"settings": m.DB.GetSettings(),
})
}

View File

@@ -53,7 +53,7 @@ type FeverFavicon struct {
Data string `json:"data"`
}
func writeFeverJSON(c *router.Context, data map[string]interface{}, lastRefreshed int64) {
func writeFeverJSON(c *router.Context, data map[string]any, lastRefreshed int64) {
data["api_version"] = 3
data["auth"] = 1
data["last_refreshed_on_time"] = lastRefreshed
@@ -78,7 +78,7 @@ func (s *Server) feverAuth(c *router.Context) bool {
if s.Username != "" && s.Password != "" {
apiKey := c.Req.FormValue("api_key")
apiKey = strings.ToLower(apiKey)
md5HashValue := md5.Sum([]byte(fmt.Sprintf("%s:%s", s.Username, s.Password)))
md5HashValue := md5.Sum(fmt.Appendf(nil, "%s:%s", s.Username, s.Password))
hexMD5HashValue := fmt.Sprintf("%x", md5HashValue[:])
if !auth.StringsEqual(apiKey, hexMD5HashValue) {
return false
@@ -97,7 +97,7 @@ func formHasValue(values url.Values, value string) bool {
func (s *Server) handleFever(c *router.Context) {
c.Req.ParseForm()
if !s.feverAuth(c) {
c.JSON(http.StatusOK, map[string]interface{}{
c.JSON(http.StatusOK, map[string]any{
"api_version": 3,
"auth": 0,
"last_refreshed_on_time": 0,
@@ -123,7 +123,7 @@ func (s *Server) handleFever(c *router.Context) {
case formHasValue(c.Req.Form, "mark"):
s.feverMarkHandler(c)
default:
c.JSON(http.StatusOK, map[string]interface{}{
c.JSON(http.StatusOK, map[string]any{
"api_version": 3,
"auth": 1,
"last_refreshed_on_time": getLastRefreshedOnTime(s.db.ListHTTPStates()),
@@ -168,7 +168,7 @@ func (s *Server) feverGroupsHandler(c *router.Context) {
for i, folder := range folders {
groups[i] = &FeverGroup{ID: folder.Id, Title: folder.Title}
}
writeFeverJSON(c, map[string]interface{}{
writeFeverJSON(c, map[string]any{
"groups": groups,
"feeds_groups": feedGroups(s.db),
}, getLastRefreshedOnTime(s.db.ListHTTPStates()))
@@ -194,7 +194,7 @@ func (s *Server) feverFeedsHandler(c *router.Context) {
LastUpdated: lastUpdated,
}
}
writeFeverJSON(c, map[string]interface{}{
writeFeverJSON(c, map[string]any{
"feeds": feverFeeds,
"feeds_groups": feedGroups(s.db),
}, getLastRefreshedOnTime(httpStates))
@@ -216,7 +216,7 @@ func (s *Server) feverFaviconsHandler(c *router.Context) {
favicons[i] = &FeverFavicon{ID: feed.Id, Data: data}
}
writeFeverJSON(c, map[string]interface{}{
writeFeverJSON(c, map[string]any{
"favicons": favicons,
}, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}
@@ -280,15 +280,15 @@ func (s *Server) feverItemsHandler(c *router.Context) {
totalItems := s.db.CountItems(storage.ItemFilter{})
writeFeverJSON(c, map[string]interface{}{
writeFeverJSON(c, map[string]any{
"items": feverItems,
"total_items": totalItems,
}, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}
func (s *Server) feverLinksHandler(c *router.Context) {
writeFeverJSON(c, map[string]interface{}{
"links": make([]interface{}, 0),
writeFeverJSON(c, map[string]any{
"links": make([]any, 0),
}, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}
@@ -309,7 +309,7 @@ func (s *Server) feverUnreadItemIDsHandler(c *router.Context) {
}
itemFilter.After = &items[len(items)-1].Id
}
writeFeverJSON(c, map[string]interface{}{
writeFeverJSON(c, map[string]any{
"unread_item_ids": joinInts(itemIds),
}, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}
@@ -331,7 +331,7 @@ func (s *Server) feverSavedItemIDsHandler(c *router.Context) {
}
itemFilter.After = &items[len(items)-1].Id
}
writeFeverJSON(c, map[string]interface{}{
writeFeverJSON(c, map[string]any{
"saved_item_ids": joinInts(itemIds),
}, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}
@@ -375,7 +375,10 @@ func (s *Server) feverMarkHandler(c *router.Context) {
if c.Req.Form.Get("as") != "read" {
c.Out.WriteHeader(http.StatusBadRequest)
}
markFilter := storage.MarkFilter{FolderID: &id}
markFilter := storage.MarkFilter{}
if id > 0 {
markFilter.FolderID = &id
}
x, _ := strconv.ParseInt(c.Req.Form.Get("before"), 10, 64)
if x > 0 {
before := time.Unix(x, 0).UTC()
@@ -386,7 +389,7 @@ func (s *Server) feverMarkHandler(c *router.Context) {
c.Out.WriteHeader(http.StatusBadRequest)
return
}
c.JSON(http.StatusOK, map[string]interface{}{
c.JSON(http.StatusOK, map[string]any{
"api_version": 3,
"auth": 1,
})

View File

@@ -24,7 +24,7 @@ func (c *Context) Next() {
c.chain[c.index](c)
}
func (c *Context) JSON(status int, data interface{}) {
func (c *Context) JSON(status int, data any) {
body, err := json.Marshal(data)
if err != nil {
log.Fatal(err)
@@ -35,7 +35,7 @@ func (c *Context) JSON(status int, data interface{}) {
c.Out.Write([]byte("\n"))
}
func (c *Context) HTML(status int, tmpl *template.Template, data interface{}) {
func (c *Context) HTML(status int, tmpl *template.Template, data any) {
c.Out.Header().Set("Content-Type", "text/html")
c.Out.WriteHeader(status)
tmpl.Execute(c.Out, data)

View File

@@ -64,7 +64,7 @@ func (s *Server) handler() http.Handler {
}
func (s *Server) handleIndex(c *router.Context) {
c.HTML(http.StatusOK, assets.Template("index.html"), map[string]interface{}{
c.HTML(http.StatusOK, assets.Template("index.html"), map[string]any{
"settings": s.db.GetSettings(),
"authenticated": s.Username != "" && s.Password != "",
})
@@ -82,14 +82,14 @@ func (s *Server) handleStatic(c *router.Context) {
}
func (s *Server) handleManifest(c *router.Context) {
c.JSON(http.StatusOK, map[string]interface{}{
c.JSON(http.StatusOK, map[string]any{
"$schema": "https://json.schemastore.org/web-manifest-combined.json",
"name": "yarr!",
"short_name": "yarr",
"description": "yet another rss reader",
"display": "standalone",
"start_url": "/" + strings.TrimPrefix(s.BasePath, "/"),
"icons": []map[string]interface{}{
"icons": []map[string]any{
{
"src": s.BasePath + "/static/graphicarts/favicon.png",
"sizes": "64x64",
@@ -100,7 +100,7 @@ func (s *Server) handleManifest(c *router.Context) {
}
func (s *Server) handleStatus(c *router.Context) {
c.JSON(http.StatusOK, map[string]interface{}{
c.JSON(http.StatusOK, map[string]any{
"running": s.worker.FeedsPending(),
"stats": s.db.FeedStats(),
})
@@ -239,7 +239,7 @@ func (s *Server) handleFeedList(c *router.Context) {
case len(result.Sources) > 0:
c.JSON(
http.StatusOK,
map[string]interface{}{"status": "multiple", "choice": result.Sources},
map[string]any{"status": "multiple", "choice": result.Sources},
)
case result.Feed != nil:
feed := s.db.CreateFeed(
@@ -252,12 +252,11 @@ func (s *Server) handleFeedList(c *router.Context) {
items := worker.ConvertItems(result.Feed.Items, *feed)
if len(items) > 0 {
s.db.CreateItems(items)
s.db.SetFeedSize(feed.Id, len(items))
s.db.SyncSearch()
}
s.worker.FindFeedFavicon(*feed)
c.JSON(http.StatusOK, map[string]interface{}{
c.JSON(http.StatusOK, map[string]any{
"status": "success",
"feed": feed,
})
@@ -279,7 +278,7 @@ func (s *Server) handleFeed(c *router.Context) {
c.Out.WriteHeader(http.StatusBadRequest)
return
}
body := make(map[string]interface{})
body := make(map[string]any)
if err := json.NewDecoder(c.Req.Body).Decode(&body); err != nil {
log.Print(err)
c.Out.WriteHeader(http.StatusBadRequest)
@@ -391,7 +390,7 @@ func (s *Server) handleItemList(c *router.Context) {
items[i].Title = htmlutil.TruncateText(text, 140)
}
}
c.JSON(http.StatusOK, map[string]interface{}{
c.JSON(http.StatusOK, map[string]any{
"list": items,
"has_more": hasMore,
})
@@ -415,7 +414,7 @@ func (s *Server) handleSettings(c *router.Context) {
if c.Req.Method == "GET" {
c.JSON(http.StatusOK, s.db.GetSettings())
} else if c.Req.Method == "PUT" {
settings := make(map[string]interface{})
settings := make(map[string]any)
if err := json.NewDecoder(c.Req.Body).Decode(&settings); err != nil {
c.Out.WriteHeader(http.StatusBadRequest)
return
@@ -472,7 +471,6 @@ func (s *Server) handleOPMLExport(c *router.Context) {
feedsByFolderID := make(map[int64][]*storage.Feed)
for _, feed := range s.db.ListFeeds() {
feed := feed
if feed.FolderId == nil {
doc.Feeds = append(doc.Feeds, opml.Feed{
Title: feed.Title,

View File

@@ -16,7 +16,7 @@ type Server struct {
Addr string
db *storage.Storage
worker *worker.Worker
cache map[string]interface{}
cache map[string]any
cache_mutex *sync.Mutex
BasePath string
@@ -34,7 +34,7 @@ func NewServer(db *storage.Storage, addr string) *Server {
db: db,
Addr: addr,
worker: worker.NewWorker(db),
cache: make(map[string]interface{}),
cache: make(map[string]any),
cache_mutex: &sync.Mutex{},
}
}

View File

@@ -216,16 +216,3 @@ func (s *Storage) GetFeedErrors() map[int64]string {
}
return errors
}
func (s *Storage) SetFeedSize(feedId int64, size int) {
_, err := s.db.Exec(`
insert into feed_sizes (feed_id, size)
values (:feed_id, :size)
on conflict (feed_id) do update set size = excluded.size`,
sql.Named("feed_id", feedId),
sql.Named("size", size),
)
if err != nil {
log.Print(err)
}
}

View File

@@ -24,7 +24,7 @@ func TestCreateFeedSameLink(t *testing.T) {
t.Fatal("expected feed")
}
for i := 0; i < 10; i++ {
for range 10 {
db.CreateFeed("title", "", "", "http://example2.com/feed.xml", nil)
}

View File

@@ -135,14 +135,15 @@ func (s *Storage) CreateItems(items []Item) bool {
insert into items (
guid, feed_id, title, link, date,
content, media_links,
date_arrived, status
date_arrived, last_arrived, status
)
values (
:guid, :feed_id, :title, :link, strftime('%Y-%m-%d %H:%M:%f', :date),
:content, :media_links,
:date_arrived, :status
:date_arrived, :last_arrived, :status
)
on conflict (feed_id, guid) do nothing`,
on conflict (feed_id, guid) do update set
last_arrived = :last_arrived`,
sql.Named("guid", item.GUID),
sql.Named("feed_id", item.FeedId),
sql.Named("title", item.Title),
@@ -151,6 +152,7 @@ func (s *Storage) CreateItems(items []Item) bool {
sql.Named("content", item.Content),
sql.Named("media_links", item.MediaLinks),
sql.Named("date_arrived", now),
sql.Named("last_arrived", now),
sql.Named("status", UNREAD),
)
if err != nil {
@@ -169,9 +171,9 @@ func (s *Storage) CreateItems(items []Item) bool {
return true
}
func listQueryPredicate(filter ItemFilter, newestFirst bool) (string, []interface{}) {
func listQueryPredicate(filter ItemFilter, newestFirst bool) (string, []any) {
cond := make([]string, 0)
args := make([]interface{}, 0)
args := make([]any, 0)
if filter.FolderID != nil {
cond = append(cond, "i.feed_id in (select id from feeds where folder_id = :folder_id)")
args = append(args, sql.Named("folder_id", *filter.FolderID))
@@ -247,7 +249,7 @@ func (s *Storage) CountItems(filter ItemFilter) int {
var count int
query := fmt.Sprintf(`
select count(*)
from items
from items i
where %s
`, predicate)
err := s.db.QueryRow(query, args...).Scan(&count)
@@ -433,67 +435,35 @@ var (
//
// The rules:
// - Never delete starred entries.
// - Keep at least the same amount of articles the feed provides (default: 50).
// This prevents from deleting items for rarely updated and/or ever-growing
// feeds which might eventually reappear as unread.
// - Keep entries for a certain period (default: 90 days).
// - Keep at least 50 latest items for each feed.
// - Delete entries older than 90 days relative to the latest arrived item in the same feed.
func (s *Storage) DeleteOldItems() {
rows, err := s.db.Query(`
select
i.feed_id,
max(coalesce(s.size, 0), :keep_size) as max_items,
count(*) as num_items
from items i
left outer join feed_sizes s on s.feed_id = i.feed_id
where status != :starred_status
group by i.feed_id
`,
sql.Named("keep_size", itemsKeepSize),
result, err := s.db.Exec(`
delete from items
where id in (
select id
from (
select
id,
row_number() over (partition by feed_id order by date desc) as rn,
last_arrived,
max(last_arrived) over (partition by feed_id) as max_la
from items
where status != :starred_status
)
where rn > :keep_size
and last_arrived < datetime(max_la, :keep_days_limit)
)`,
sql.Named("starred_status", STARRED),
sql.Named("keep_size", itemsKeepSize),
sql.Named("keep_days_limit", fmt.Sprintf("-%d days", itemsKeepDays)),
)
if err != nil {
log.Print(err)
return
}
feedLimits := make(map[int64]int64, 0)
for rows.Next() {
var feedId, limit int64
rows.Scan(&feedId, &limit, nil)
feedLimits[feedId] = limit
}
for feedId, limit := range feedLimits {
result, err := s.db.Exec(
`
delete from items
where id in (
select i.id
from items i
where i.feed_id = :feed_id and status != :starred_status
order by date desc
limit -1 offset :limit
) and date_arrived < :date_limit
`,
sql.Named("feed_id", feedId),
sql.Named("starred_status", STARRED),
sql.Named("limit", limit),
sql.Named(
"date_limit",
time.Now().UTC().Add(-time.Hour*time.Duration(24*itemsKeepDays)),
),
)
if err != nil {
log.Print(err)
return
}
numDeleted, err := result.RowsAffected()
if err != nil {
log.Print(err)
return
}
if numDeleted > 0 {
log.Printf("Deleted %d old items (feed: %d)", numDeleted, feedId)
}
numDeleted, err := result.RowsAffected()
if err == nil && numDeleted > 0 {
log.Printf("Deleted %d old items", numDeleted)
}
}

View File

@@ -6,6 +6,7 @@ import (
"reflect"
"strconv"
"testing"
"testing/synctest"
"time"
)
@@ -320,57 +321,111 @@ func TestMarkItemsRead(t *testing.T) {
}
func TestDeleteOldItems(t *testing.T) {
extraItems := 10
now := time.Now().UTC()
db := testDB()
feed := db.CreateFeed("feed", "", "", "http://test.com/feed11.xml", nil)
starred := STARRED
items := make([]Item, 0)
for i := 0; i < itemsKeepSize+extraItems; i++ {
istr := strconv.Itoa(i)
items = append(items, Item{
GUID: istr,
FeedId: feed.Id,
Title: istr,
Date: now.Add(time.Hour * time.Duration(i)),
})
}
db.CreateItems(items)
t.Run("keeps at least 50 items", func(t *testing.T) {
db := testDB()
feed := db.CreateFeed("f", "", "", "http://f.xml", nil)
items := make([]Item, 100)
for i := range 100 {
items[i] = Item{GUID: strconv.Itoa(i), FeedId: feed.Id, Date: now.Add(time.Duration(i) * time.Hour * 24)}
}
db.CreateItems(items)
db.SetFeedSize(feed.Id, itemsKeepSize)
var feedSize int
err := db.db.QueryRow(
`select size from feed_sizes where feed_id = :feed_id`, sql.Named("feed_id", feed.Id),
).Scan(&feedSize)
if err != nil {
t.Fatal(err)
}
if feedSize != itemsKeepSize {
t.Fatalf(
"expected feed size to get updated\nwant: %d\nhave: %d",
itemsKeepSize+extraItems,
feedSize,
)
}
// Set 1 recent (latest), 99 old (100 days ago)
db.db.Exec(`update items set last_arrived = :la where guid = "99"`, sql.Named("la", now))
db.db.Exec(`update items set last_arrived = :la where guid != "99"`, sql.Named("la", now.Add(-time.Hour*24*100)))
// expire only the first 3 articles
_, err = db.db.Exec(
`update items set date_arrived = :date_arrived
where id in (select id from items limit 3)`,
sql.Named("date_arrived", now.Add(-time.Hour*time.Duration(itemsKeepDays*24))),
)
if err != nil {
t.Fatal(err)
}
db.DeleteOldItems()
have := db.CountItems(ItemFilter{FeedID: &feed.Id})
if have != 50 {
t.Errorf("expected 50 items, have %d", have)
}
})
db.DeleteOldItems()
feedItems := db.ListItems(ItemFilter{FeedID: &feed.Id}, 1000, false, false)
if len(feedItems) != len(items)-3 {
t.Fatalf(
"invalid number of old items kept\nwant: %d\nhave: %d",
len(items)-3,
len(feedItems),
)
}
t.Run("keeps all less than 90 days old", func(t *testing.T) {
db := testDB()
feed := db.CreateFeed("f", "", "", "http://f.xml", nil)
items := make([]Item, 100)
for i := 0; i < 100; i++ {
items[i] = Item{GUID: strconv.Itoa(i), FeedId: feed.Id, Date: now.Add(time.Duration(i) * time.Second)}
}
db.CreateItems(items)
// Latest item at "now"
// All others at 80 days ago (keep)
db.db.Exec(`update items set last_arrived = :la where guid = "99"`, sql.Named("la", now))
db.db.Exec(`update items set last_arrived = :la where guid != "99"`, sql.Named("la", now.Add(-time.Hour*24*80)))
db.DeleteOldItems()
have := db.CountItems(ItemFilter{FeedID: &feed.Id})
if have != 100 {
t.Errorf("expected 100 items, have %d", have)
}
})
t.Run("keeps starred", func(t *testing.T) {
db := testDB()
feed := db.CreateFeed("f", "", "", "http://f.xml", nil)
items := make([]Item, 100)
for i := 0; i < 100; i++ {
items[i] = Item{GUID: strconv.Itoa(i), FeedId: feed.Id, Date: now.Add(time.Duration(i) * time.Second)}
}
db.CreateItems(items)
// Set all to 100 days ago, except one recent
db.db.Exec(`update items set last_arrived = :la`, sql.Named("la", now.Add(-time.Hour*24*100)))
db.db.Exec(`update items set last_arrived = :la where guid = "99"`, sql.Named("la", now))
// Star 10 old items that would otherwise be deleted (rn > 50 and old)
db.db.Exec(`update items set status = :s where cast(guid as integer) < 10`, sql.Named("s", starred))
db.DeleteOldItems()
have := db.CountItems(ItemFilter{FeedID: &feed.Id})
// 50 (limit) + 10 (starred) = 60 items should remain.
if have != 60 {
t.Errorf("expected 60 items, have %d", have)
}
})
}
func TestCreateItemsLastArrived(t *testing.T) {
synctest.Test(t, func(t *testing.T) {
db := testDB()
defer db.db.Close()
feed := db.CreateFeed("test feed", "", "", "http://example.com/feed", nil)
item := Item{
GUID: "item1",
FeedId: feed.Id,
Title: "Title 1",
Date: time.Now(),
}
// 1. Initial creation
db.CreateItems([]Item{item})
var lastArrived1 time.Time
err := db.db.QueryRow("select last_arrived from items where guid = ?", item.GUID).Scan(&lastArrived1)
if err != nil {
t.Fatal(err)
}
time.Sleep(time.Second * 10)
// 2. Update on conflict
db.CreateItems([]Item{item})
var lastArrived2 time.Time
err = db.db.QueryRow("select last_arrived from items where guid = ?", item.GUID).Scan(&lastArrived2)
if err != nil {
t.Fatal(err)
}
if !lastArrived2.After(lastArrived1) {
t.Errorf("expected last_arrived to be updated. old: %v, new: %v", lastArrived1, lastArrived2)
}
})
}

View File

@@ -18,6 +18,8 @@ var migrations = []func(*sql.Tx) error{
m08_normalize_datetime,
m09_change_item_index,
m10_add_item_medialinks,
m11_add_item_last_arrived,
m12_remove_feed_sizes,
}
var maxVersion = int64(len(migrations))
@@ -332,3 +334,14 @@ func m10_add_item_medialinks(tx *sql.Tx) error {
_, err := tx.Exec(sql)
return err
}
func m11_add_item_last_arrived(tx *sql.Tx) error {
sql := `alter table items add column last_arrived datetime`
_, err := tx.Exec(sql)
return err
}
func m12_remove_feed_sizes(tx *sql.Tx) error {
_, err := tx.Exec(`drop table if exists feed_sizes`)
return err
}

View File

@@ -6,8 +6,8 @@ import (
"log"
)
func settingsDefaults() map[string]interface{} {
return map[string]interface{}{
func settingsDefaults() map[string]any {
return map[string]any{
"filter": "",
"feed": "",
"feed_list_width": 300,
@@ -20,7 +20,7 @@ func settingsDefaults() map[string]interface{} {
}
}
func (s *Storage) GetSettingsValue(key string) interface{} {
func (s *Storage) GetSettingsValue(key string) any {
row := s.db.QueryRow(`select val from settings where key=:key`, sql.Named("key", key))
if row == nil {
return settingsDefaults()[key]
@@ -30,7 +30,7 @@ func (s *Storage) GetSettingsValue(key string) interface{} {
if len(val) == 0 {
return nil
}
var valDecoded interface{}
var valDecoded any
if err := json.Unmarshal([]byte(val), &valDecoded); err != nil {
log.Print(err)
return nil
@@ -48,7 +48,7 @@ func (s *Storage) GetSettingsValueInt64(key string) int64 {
return 0
}
func (s *Storage) GetSettings() map[string]interface{} {
func (s *Storage) GetSettings() map[string]any {
result := settingsDefaults()
rows, err := s.db.Query(`select key, val from settings;`)
if err != nil {
@@ -58,7 +58,7 @@ func (s *Storage) GetSettings() map[string]interface{} {
for rows.Next() {
var key string
var val []byte
var valDecoded interface{}
var valDecoded any
rows.Scan(&key, &val)
if err = json.Unmarshal([]byte(val), &valDecoded); err != nil {
@@ -70,7 +70,7 @@ func (s *Storage) GetSettings() map[string]interface{} {
return result
}
func (s *Storage) UpdateSettings(kv map[string]interface{}) bool {
func (s *Storage) UpdateSettings(kv map[string]any) bool {
defaults := settingsDefaults()
for key, val := range kv {
if defaults[key] == nil {

View File

@@ -141,7 +141,6 @@ func findFavicon(siteUrl, feedUrl string) (*[]byte, error) {
func ConvertItems(items []parser.Item, feed storage.Feed) []storage.Item {
result := make([]storage.Item, len(items))
for i, item := range items {
item := item
mediaLinks := make(storage.MediaLinks, 0)
for _, link := range item.MediaLinks {
mediaLinks = append(mediaLinks, storage.MediaLink(link))

View File

@@ -113,18 +113,17 @@ func (w *Worker) refresher(feeds []storage.Feed) {
srcqueue := make(chan storage.Feed, len(feeds))
dstqueue := make(chan []storage.Item)
for i := 0; i < NUM_WORKERS; i++ {
for range NUM_WORKERS {
go w.worker(srcqueue, dstqueue)
}
for _, feed := range feeds {
srcqueue <- feed
}
for i := 0; i < len(feeds); i++ {
for range feeds {
items := <-dstqueue
if len(items) > 0 {
w.db.CreateItems(items)
w.db.SetFeedSize(items[0].FeedId, len(items))
}
atomic.AddInt32(w.pending, -1)
w.db.SyncSearch()