Nazar Kanaev 2021-03-19 00:06:48 +00:00
parent 391ce61362
commit 9f376db0f4
24 changed files with 65 additions and 67 deletions

View File

@@ -1,12 +1,12 @@
package main
import (
"io/ioutil"
"flag"
"io/ioutil"
"strings"
)
- var rsrc = `1 VERSIONINFO
+ var rsrc = `1 VERSIONINFO
FILEVERSION {VERSION_COMMA},0,0
PRODUCTVERSION {VERSION_COMMA},0,0
BEGIN

View File

@@ -85,9 +85,9 @@ func main() {
for _, res := range []int{1024, 512, 256, 128, 64, 32, 16} {
outfile := fmt.Sprintf("icon_%dx%d.png", res, res)
if res == 1024 || res == 64 {
- outfile = fmt.Sprintf("icon_%dx%d@2x.png", res / 2, res / 2)
+ outfile = fmt.Sprintf("icon_%dx%d@2x.png", res/2, res/2)
}
- cmd := []string {
+ cmd := []string{
"sips", "-s", "format", "png", "--resampleWidth", strconv.Itoa(res),
iconFile, "--out", path.Join(iconsetDir, outfile),
}

View File

@@ -11,5 +11,5 @@ import "embed"
var embedded embed.FS
func init() {
- FS.embedded = &embedded
+ FS.embedded = &embedded
}

View File

@@ -33,10 +33,10 @@ func Authenticate(rw http.ResponseWriter, username, password, basepath string) {
func Logout(rw http.ResponseWriter, basepath string) {
http.SetCookie(rw, &http.Cookie{
Name: "auth",
Value: "",
MaxAge: -1,
Path: basepath,
Name: "auth",
Value: "",
MaxAge: -1,
Path: basepath,
})
}

View File

@@ -20,7 +20,7 @@ func unsafeMethod(method string) bool {
}
func (m *Middleware) Handler(c *router.Context) {
- if strings.HasPrefix(c.Req.URL.Path, m.BasePath + m.Public) {
+ if strings.HasPrefix(c.Req.URL.Path, m.BasePath+m.Public) {
c.Next()
return
}
@@ -46,7 +46,7 @@ func (m *Middleware) Handler(c *router.Context) {
} else {
c.HTML(http.StatusOK, assets.Template("login.html"), map[string]string{
"username": username,
"error": "Invalid username/password",
"error": "Invalid username/password",
})
return
}

View File

@@ -1,8 +1,8 @@
package crawler
import (
"testing"
"reflect"
"testing"
)
const base = "http://example.com"
@@ -11,7 +11,7 @@ func TestFindFeedsInvalidHTML(t *testing.T) {
x := `some nonsense`
r := FindFeeds(x, base)
if len(r) != 0 {
t.Fatal("not expecting results")
t.Fatal("not expecting results")
}
}
@@ -34,8 +34,8 @@ func TestFindFeedsLinks(t *testing.T) {
have := FindFeeds(x, base)
want := map[string]string{
base + "/feed.xml": "rss with title",
base + "/atom.xml": "",
base + "/feed.xml": "rss with title",
base + "/atom.xml": "",
base + "/feed.json": "",
}
if !reflect.DeepEqual(have, want) {
@@ -63,7 +63,7 @@ func TestFindFeedsGuess(t *testing.T) {
have := FindFeeds(body, base)
want := map[string]string{
base + "/feed.xml": "",
base + "/news": "",
base + "/news": "",
}
if !reflect.DeepEqual(want, have) {
t.Logf("want: %#v", want)

View File

@@ -19,7 +19,7 @@ var GitHash string = "unknown"
func main() {
log.SetOutput(os.Stdout)
- log.SetFlags(log.Ldate|log.Ltime|log.Lshortfile)
+ log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile)
var addr, db, authfile, certfile, keyfile string
var ver, open bool

View File

@@ -1,9 +1,9 @@
package opml
import (
"strings"
"html"
"fmt"
"html"
"strings"
)
type Folder struct {
@@ -36,7 +36,7 @@ func (f Folder) outline(level int) string {
prefix := strings.Repeat(indent, level)
if level > 0 {
- builder.WriteString(prefix + fmt.Sprintf(`<outline text="%s">` + nl, e(f.Title)))
+ builder.WriteString(prefix + fmt.Sprintf(`<outline text="%s">`+nl, e(f.Title)))
}
for _, folder := range f.Folders {
builder.WriteString(folder.outline(level + 1))
@@ -52,7 +52,7 @@ func (f Folder) outline(level int) string {
func (f Feed) outline(level int) string {
return strings.Repeat(indent, level) + fmt.Sprintf(
`<outline type="rss" text="%s" xmlUrl="%s" htmlUrl="%s"/>` + nl,
`<outline type="rss" text="%s" xmlUrl="%s" htmlUrl="%s"/>`+nl,
e(f.Title), e(f.FeedUrl), e(f.SiteUrl),
)
}

View File

@@ -5,28 +5,27 @@ import (
"testing"
)
func TestOPML(t *testing.T) {
have := (Folder{
Title: "",
Feeds: []Feed{
- Feed{
- Title: "title1",
+ {
+ Title: "title1",
FeedUrl: "https://baz.com/feed.xml",
SiteUrl: "https://baz.com/",
},
},
Folders: []Folder{
- Folder{
+ {
Title: "sub",
Feeds: []Feed{
- Feed{
- Title: "subtitle1",
+ {
+ Title: "subtitle1",
FeedUrl: "https://foo.com/feed.xml",
SiteUrl: "https://foo.com/",
},
- Feed{
- Title: "&>",
+ {
+ Title: "&>",
FeedUrl: "https://bar.com/feed.xml",
SiteUrl: "https://bar.com/",
},

View File

@@ -11,11 +11,11 @@ type opml struct {
}
type outline struct {
- Type string `xml:"type,attr,omitempty"`
- Title string `xml:"text,attr"`
- FeedUrl string `xml:"xmlUrl,attr,omitempty"`
- SiteUrl string `xml:"htmlUrl,attr,omitempty"`
- Outlines []outline `xml:"outline,omitempty"`
+ Type string `xml:"type,attr,omitempty"`
+ Title string `xml:"text,attr"`
+ FeedUrl string `xml:"xmlUrl,attr,omitempty"`
+ SiteUrl string `xml:"htmlUrl,attr,omitempty"`
+ Outlines []outline `xml:"outline,omitempty"`
}
func buildFolder(title string, outlines []outline) Folder {

View File

@@ -6,7 +6,6 @@ import (
"testing"
)
func TestParse(t *testing.T) {
have, _ := Parse(strings.NewReader(`
<?xml version="1.0" encoding="UTF-8"?>
@@ -27,30 +26,30 @@ func TestParse(t *testing.T) {
want := Folder{
Title: "",
Feeds: []Feed{
- Feed{
- Title: "title1",
+ {
+ Title: "title1",
FeedUrl: "https://baz.com/feed.xml",
SiteUrl: "https://baz.com/",
},
},
Folders: []Folder{
- Folder{
+ {
Title: "sub",
Feeds: []Feed{
- Feed{
- Title: "subtitle1",
+ {
+ Title: "subtitle1",
FeedUrl: "https://foo.com/feed.xml",
SiteUrl: "https://foo.com/",
},
- Feed{
- Title: "&>",
+ {
+ Title: "&>",
FeedUrl: "https://bar.com/feed.xml",
SiteUrl: "https://bar.com/",
},
},
},
},
- }
+ }
if !reflect.DeepEqual(want, have) {
t.Logf("want: %#v", want)
t.Logf("have: %#v", have)

View File

@@ -3,8 +3,8 @@
package platform
import (
"github.com/nkanaev/yarr/src/systray"
"github.com/nkanaev/yarr/src/server"
"github.com/nkanaev/yarr/src/systray"
)
func Start(s *server.Handler) {

View File

@@ -10,10 +10,10 @@ import (
)
type Context struct {
- Req *http.Request
- Out http.ResponseWriter
+ Req *http.Request
+ Out http.ResponseWriter
- Vars map[string]string
+ Vars map[string]string
chain []Handler
index int

View File

@@ -55,7 +55,7 @@ func TestRouteRegexpStar(t *testing.T) {
func TestRegexGroupsPart(t *testing.T) {
re := routeRegexp("/foo/:bar/1/:baz")
expect := map[string]string{"bar": "one", "baz": "two"}
actual := regexGroups("/foo/one/1/two", re)
@@ -66,7 +66,7 @@ func TestRegexGroupsPart(t *testing.T) {
func TestRegexGroupsStar(t *testing.T) {
re := routeRegexp("/foo/*bar")
expect := map[string]string{"bar": "bar/baz/"}
actual := regexGroups("/foo/bar/baz/", re)

View File

@@ -51,7 +51,7 @@ func (r *Router) resolve(path string) *Route {
func (r *Router) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
// autoclose open base url
if r.base != "" && r.base == req.URL.Path {
http.Redirect(rw, req, r.base + "/", http.StatusFound)
http.Redirect(rw, req, r.base+"/", http.StatusFound)
return
}

View File

@@ -361,7 +361,7 @@ func (s *Server) handleOPMLExport(c *router.Context) {
feed := feed
if feed.FolderId == nil {
doc.Feeds = append(doc.Feeds, opml.Feed{
- Title: feed.Title,
+ Title: feed.Title,
FeedUrl: feed.FeedLink,
SiteUrl: feed.Link,
})
@@ -370,7 +370,7 @@ func (s *Server) handleOPMLExport(c *router.Context) {
feedsByFolderID[id] = append(feedsByFolderID[id], &feed)
}
}
for _, folder := range s.db.ListFolders() {
folderFeeds := feedsByFolderID[folder.Id]
if len(folderFeeds) == 0 {
@@ -379,7 +379,7 @@ func (s *Server) handleOPMLExport(c *router.Context) {
opmlfolder := opml.Folder{Title: folder.Title}
for _, feed := range folderFeeds {
opmlfolder.Feeds = append(opmlfolder.Feeds, opml.Feed{
- Title: feed.Title,
+ Title: feed.Title,
FeedUrl: feed.FeedLink,
SiteUrl: feed.Link,
})

View File

@@ -11,9 +11,9 @@ import (
var BasePath string = ""
type Server struct {
- Addr string
- db *storage.Storage
- worker *worker.Worker
+ Addr string
+ db *storage.Storage
+ worker *worker.Worker
// auth
Username string
Password string

View File

@@ -1,8 +1,8 @@
package storage
import (
"log"
"html"
"log"
"net/url"
)

View File

@@ -14,7 +14,7 @@ type HTTPState struct {
}
func (s *Storage) ListHTTPStates() map[int64]HTTPState {
- result := make(map[int64]HTTPState)
+ result := make(map[int64]HTTPState)
rows, err := s.db.Query(`select feed_id, last_refreshed, last_modified, etag from http_states`)
if err != nil {
log.Print(err)

View File

@@ -3,11 +3,11 @@ package storage
import (
"encoding/json"
"fmt"
xhtml "golang.org/x/net/html"
"html"
"log"
"strings"
"time"
xhtml "golang.org/x/net/html"
)
type ItemStatus int

View File

@@ -6,7 +6,7 @@ import (
"log"
)
- var migrations = []func(*sql.Tx)error{
+ var migrations = []func(*sql.Tx) error{
m01_initial,
m02_feed_states_and_errors,
m03_on_delete_actions,
@@ -17,7 +17,7 @@ var maxVersion = int64(len(migrations))
func migrate(db *sql.DB) error {
var version int64
db.QueryRow("pragma user_version").Scan(&version);
db.QueryRow("pragma user_version").Scan(&version)
if version >= maxVersion {
return nil
@@ -29,7 +29,7 @@ func migrate(db *sql.DB) error {
// Migrations altering schema using a sequence of steps due to SQLite limitations.
// Must come with `pragma foreign_key_check` at the end. See:
// "Making Other Kinds Of Table Schema Changes"
- // https://www.sqlite.org/lang_altertable.html
+ // https://www.sqlite.org/lang_altertable.html
trickyAlteration := (v == 3)
log.Printf("[migration:%d] starting", v)
@@ -56,7 +56,7 @@ func migrate(db *sql.DB) error {
func migrateVersion(v int64, db *sql.DB) error {
var err error
var tx *sql.Tx
- migratefunc := migrations[v - 1]
+ migratefunc := migrations[v-1]
if tx, err = db.Begin(); err != nil {
log.Printf("[migration:%d] failed to start transaction", v)
return err

View File

@@ -6,7 +6,7 @@ import (
)
type Storage struct {
- db *sql.DB
+ db *sql.DB
}
func New(path string) (*Storage, error) {

View File

@@ -5,8 +5,8 @@ import (
"errors"
"fmt"
"github.com/mmcdole/gofeed"
"github.com/nkanaev/yarr/src/storage"
"github.com/nkanaev/yarr/src/crawler"
"github.com/nkanaev/yarr/src/storage"
"io/ioutil"
"net"
"net/http"

View File

@@ -3,8 +3,8 @@ package worker
import (
"github.com/nkanaev/yarr/src/storage"
"log"
"sync/atomic"
"runtime"
"sync/atomic"
"time"
)
@@ -19,7 +19,7 @@ type Worker struct {
func NewWorker(db *storage.Storage) *Worker {
queueSize := int32(0)
return &Worker{
- db: db,
+ db: db,
feedQueue: make(chan storage.Feed, 3000),
queueSize: &queueSize,
refreshRate: make(chan int64),