Mirror of https://github.com/nkanaev/yarr.git

commit 9f376db0f4 (parent 391ce61362)

    gofmt
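For context: gofmt rewrites Go source into one canonical layout. In this commit that means re-sorted import blocks, gofmt's heuristic spacing around binary operators (some expressions are tightened, e.g. res/2 and v-1, while others gain spaces, e.g. log.Ldate | log.Ltime | log.Lshortfile), aligned keys in composite literals, simplified literal elements, and a dropped redundant semicolon. A minimal, hypothetical sketch of the kind of output gofmt produces (not a file from this repository; the expressions mirror hunks below):

// demo.go — hypothetical illustration of gofmt's canonical output.
package main

import (
    "fmt"
    "log"
)

func main() {
    res := 128
    // before gofmt: fmt.Sprintf("icon_%dx%d@2x.png", res / 2, res / 2)
    name := fmt.Sprintf("icon_%dx%d@2x.png", res/2, res/2)
    // before gofmt: log.SetFlags(log.Ldate|log.Ltime|log.Lshortfile)
    log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile)
    log.Print(name)
}

Running gofmt -l . lists files whose formatting differs from this canonical form, and gofmt -w . rewrites them in place, which is presumably how this commit was produced.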
@@ -1,12 +1,12 @@
 package main
 
 import (
-    "io/ioutil"
     "flag"
+    "io/ioutil"
     "strings"
 )
 
 var rsrc = `1 VERSIONINFO
 FILEVERSION {VERSION_COMMA},0,0
 PRODUCTVERSION {VERSION_COMMA},0,0
 BEGIN
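The hunk above, and several below, only reorder imports: gofmt sorts the paths within each parenthesized import block lexicographically (it does not regroup or add/remove imports the way goimports does). A minimal sketch, with a hypothetical file name:

// imports_demo.go — hypothetical; shows the ordering gofmt settles on.
package main

import (
    "flag"
    "io/ioutil"
    "strings"
)

func main() {
    _ = flag.Args()
    _ = ioutil.Discard
    _ = strings.TrimSpace(" ok ")
}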
@@ -85,9 +85,9 @@ func main() {
     for _, res := range []int{1024, 512, 256, 128, 64, 32, 16} {
         outfile := fmt.Sprintf("icon_%dx%d.png", res, res)
         if res == 1024 || res == 64 {
-            outfile = fmt.Sprintf("icon_%dx%d@2x.png", res / 2, res / 2)
+            outfile = fmt.Sprintf("icon_%dx%d@2x.png", res/2, res/2)
         }
-        cmd := []string {
+        cmd := []string{
             "sips", "-s", "format", "png", "--resampleWidth", strconv.Itoa(res),
             iconFile, "--out", path.Join(iconsetDir, outfile),
         }
@@ -11,5 +11,5 @@ import "embed"
 var embedded embed.FS
 
 func init() {
     FS.embedded = &embedded
 }
@@ -33,10 +33,10 @@ func Authenticate(rw http.ResponseWriter, username, password, basepath string) {
 
 func Logout(rw http.ResponseWriter, basepath string) {
     http.SetCookie(rw, &http.Cookie{
         Name: "auth",
         Value: "",
         MaxAge: -1,
         Path: basepath,
     })
 }
 
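The cookie literal above is one of several hunks that change only whitespace: gofmt pads the values of a key/value composite literal so they line up in a column set by the longest key. A minimal sketch (hypothetical path value) of the aligned form gofmt emits:

// cookie_demo.go — hypothetical; illustrates gofmt's key/value alignment.
package main

import (
    "fmt"
    "net/http"
)

func main() {
    c := http.Cookie{
        Name:   "auth", // values padded to the column set by "MaxAge"
        Value:  "",
        MaxAge: -1,
        Path:   "/",
    }
    fmt.Println(c.String())
}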
@@ -20,7 +20,7 @@ func unsafeMethod(method string) bool {
 }
 
 func (m *Middleware) Handler(c *router.Context) {
-    if strings.HasPrefix(c.Req.URL.Path, m.BasePath + m.Public) {
+    if strings.HasPrefix(c.Req.URL.Path, m.BasePath+m.Public) {
         c.Next()
         return
     }
@@ -46,7 +46,7 @@ func (m *Middleware) Handler(c *router.Context) {
     } else {
         c.HTML(http.StatusOK, assets.Template("login.html"), map[string]string{
             "username": username,
             "error": "Invalid username/password",
         })
         return
     }
@@ -1,8 +1,8 @@
 package crawler
 
 import (
-    "testing"
     "reflect"
+    "testing"
 )
 
 const base = "http://example.com"
@@ -11,7 +11,7 @@ func TestFindFeedsInvalidHTML(t *testing.T) {
     x := `some nonsense`
     r := FindFeeds(x, base)
     if len(r) != 0 {
         t.Fatal("not expecting results")
     }
 }
 
@@ -34,8 +34,8 @@ func TestFindFeedsLinks(t *testing.T) {
     have := FindFeeds(x, base)
 
     want := map[string]string{
         base + "/feed.xml": "rss with title",
         base + "/atom.xml": "",
         base + "/feed.json": "",
     }
     if !reflect.DeepEqual(have, want) {
@@ -63,7 +63,7 @@ func TestFindFeedsGuess(t *testing.T) {
     have := FindFeeds(body, base)
     want := map[string]string{
         base + "/feed.xml": "",
         base + "/news": "",
     }
     if !reflect.DeepEqual(want, have) {
         t.Logf("want: %#v", want)
@@ -19,7 +19,7 @@ var GitHash string = "unknown"
 
 func main() {
     log.SetOutput(os.Stdout)
-    log.SetFlags(log.Ldate|log.Ltime|log.Lshortfile)
+    log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile)
 
     var addr, db, authfile, certfile, keyfile string
     var ver, open bool
@@ -1,9 +1,9 @@
 package opml
 
 import (
-    "strings"
-    "html"
     "fmt"
+    "html"
+    "strings"
 )
 
 type Folder struct {
@@ -36,7 +36,7 @@ func (f Folder) outline(level int) string {
     prefix := strings.Repeat(indent, level)
 
     if level > 0 {
-        builder.WriteString(prefix + fmt.Sprintf(`<outline text="%s">` + nl, e(f.Title)))
+        builder.WriteString(prefix + fmt.Sprintf(`<outline text="%s">`+nl, e(f.Title)))
     }
     for _, folder := range f.Folders {
         builder.WriteString(folder.outline(level + 1))
@@ -52,7 +52,7 @@ func (f Folder) outline(level int) string {
 
 func (f Feed) outline(level int) string {
     return strings.Repeat(indent, level) + fmt.Sprintf(
-        `<outline type="rss" text="%s" xmlUrl="%s" htmlUrl="%s"/>` + nl,
+        `<outline type="rss" text="%s" xmlUrl="%s" htmlUrl="%s"/>`+nl,
         e(f.Title), e(f.FeedUrl), e(f.SiteUrl),
     )
 }
|
@ -5,28 +5,27 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
func TestOPML(t *testing.T) {
|
func TestOPML(t *testing.T) {
|
||||||
have := (Folder{
|
have := (Folder{
|
||||||
Title: "",
|
Title: "",
|
||||||
Feeds: []Feed{
|
Feeds: []Feed{
|
||||||
Feed{
|
{
|
||||||
Title: "title1",
|
Title: "title1",
|
||||||
FeedUrl: "https://baz.com/feed.xml",
|
FeedUrl: "https://baz.com/feed.xml",
|
||||||
SiteUrl: "https://baz.com/",
|
SiteUrl: "https://baz.com/",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Folders: []Folder{
|
Folders: []Folder{
|
||||||
Folder{
|
{
|
||||||
Title: "sub",
|
Title: "sub",
|
||||||
Feeds: []Feed{
|
Feeds: []Feed{
|
||||||
Feed{
|
{
|
||||||
Title: "subtitle1",
|
Title: "subtitle1",
|
||||||
FeedUrl: "https://foo.com/feed.xml",
|
FeedUrl: "https://foo.com/feed.xml",
|
||||||
SiteUrl: "https://foo.com/",
|
SiteUrl: "https://foo.com/",
|
||||||
},
|
},
|
||||||
Feed{
|
{
|
||||||
Title: "&>",
|
Title: "&>",
|
||||||
FeedUrl: "https://bar.com/feed.xml",
|
FeedUrl: "https://bar.com/feed.xml",
|
||||||
SiteUrl: "https://bar.com/",
|
SiteUrl: "https://bar.com/",
|
||||||
},
|
},
|
||||||
|
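The Feed{ → { edits above (and in the parser test further down) are composite-literal simplification: when the slice's element type is already spelled out, repeating it on every element is redundant and the simplify rule drops it. Plain gofmt leaves this alone; gofmt -s (or an editor applying the -s rules) makes the change. A minimal sketch with hypothetical data:

// simplify_demo.go — hypothetical; shows a []Feed literal after gofmt -s.
package main

import "fmt"

type Feed struct {
    Title   string
    FeedUrl string
}

func main() {
    // before -s: []Feed{Feed{...}, Feed{...}} — element type repeated
    feeds := []Feed{
        {Title: "title1", FeedUrl: "https://baz.com/feed.xml"},
        {Title: "subtitle1", FeedUrl: "https://foo.com/feed.xml"},
    }
    fmt.Println(len(feeds))
}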
@@ -11,11 +11,11 @@ type opml struct {
 }
 
 type outline struct {
     Type string `xml:"type,attr,omitempty"`
     Title string `xml:"text,attr"`
     FeedUrl string `xml:"xmlUrl,attr,omitempty"`
     SiteUrl string `xml:"htmlUrl,attr,omitempty"`
     Outlines []outline `xml:"outline,omitempty"`
 }
 
 func buildFolder(title string, outlines []outline) Folder {
@@ -6,7 +6,6 @@ import (
     "testing"
 )
 
-
 func TestParse(t *testing.T) {
     have, _ := Parse(strings.NewReader(`
 <?xml version="1.0" encoding="UTF-8"?>
@@ -27,30 +26,30 @@ func TestParse(t *testing.T) {
     want := Folder{
         Title: "",
         Feeds: []Feed{
-            Feed{
+            {
                 Title: "title1",
                 FeedUrl: "https://baz.com/feed.xml",
                 SiteUrl: "https://baz.com/",
             },
         },
         Folders: []Folder{
-            Folder{
+            {
                 Title: "sub",
                 Feeds: []Feed{
-                    Feed{
+                    {
                         Title: "subtitle1",
                         FeedUrl: "https://foo.com/feed.xml",
                         SiteUrl: "https://foo.com/",
                     },
-                    Feed{
+                    {
                         Title: "&>",
                         FeedUrl: "https://bar.com/feed.xml",
                         SiteUrl: "https://bar.com/",
                     },
                 },
             },
         },
     }
     if !reflect.DeepEqual(want, have) {
         t.Logf("want: %#v", want)
         t.Logf("have: %#v", have)
@@ -3,8 +3,8 @@
 package platform
 
 import (
-    "github.com/nkanaev/yarr/src/systray"
     "github.com/nkanaev/yarr/src/server"
+    "github.com/nkanaev/yarr/src/systray"
 )
 
 func Start(s *server.Handler) {
@@ -10,10 +10,10 @@ import (
 )
 
 type Context struct {
     Req *http.Request
     Out http.ResponseWriter
 
     Vars map[string]string
 
     chain []Handler
     index int
@@ -55,7 +55,7 @@ func TestRouteRegexpStar(t *testing.T) {
 
 func TestRegexGroupsPart(t *testing.T) {
     re := routeRegexp("/foo/:bar/1/:baz")
 
     expect := map[string]string{"bar": "one", "baz": "two"}
     actual := regexGroups("/foo/one/1/two", re)
 
@@ -66,7 +66,7 @@ func TestRegexGroupsPart(t *testing.T) {
 
 func TestRegexGroupsStar(t *testing.T) {
     re := routeRegexp("/foo/*bar")
 
     expect := map[string]string{"bar": "bar/baz/"}
     actual := regexGroups("/foo/bar/baz/", re)
 
@@ -51,7 +51,7 @@ func (r *Router) resolve(path string) *Route {
 func (r *Router) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
     // autoclose open base url
     if r.base != "" && r.base == req.URL.Path {
-        http.Redirect(rw, req, r.base + "/", http.StatusFound)
+        http.Redirect(rw, req, r.base+"/", http.StatusFound)
         return
     }
 
@@ -361,7 +361,7 @@ func (s *Server) handleOPMLExport(c *router.Context) {
         feed := feed
         if feed.FolderId == nil {
             doc.Feeds = append(doc.Feeds, opml.Feed{
                 Title: feed.Title,
                 FeedUrl: feed.FeedLink,
                 SiteUrl: feed.Link,
             })
@@ -370,7 +370,7 @@ func (s *Server) handleOPMLExport(c *router.Context) {
             feedsByFolderID[id] = append(feedsByFolderID[id], &feed)
         }
     }
 
     for _, folder := range s.db.ListFolders() {
         folderFeeds := feedsByFolderID[folder.Id]
         if len(folderFeeds) == 0 {
@@ -379,7 +379,7 @@ func (s *Server) handleOPMLExport(c *router.Context) {
         opmlfolder := opml.Folder{Title: folder.Title}
         for _, feed := range folderFeeds {
             opmlfolder.Feeds = append(opmlfolder.Feeds, opml.Feed{
                 Title: feed.Title,
                 FeedUrl: feed.FeedLink,
                 SiteUrl: feed.Link,
             })
@@ -11,9 +11,9 @@ import (
 var BasePath string = ""
 
 type Server struct {
     Addr string
     db *storage.Storage
     worker *worker.Worker
     // auth
     Username string
     Password string
|
@ -1,8 +1,8 @@
|
|||||||
package storage
|
package storage
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"log"
|
|
||||||
"html"
|
"html"
|
||||||
|
"log"
|
||||||
"net/url"
|
"net/url"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@@ -14,7 +14,7 @@ type HTTPState struct {
 }
 
 func (s *Storage) ListHTTPStates() map[int64]HTTPState {
     result := make(map[int64]HTTPState)
     rows, err := s.db.Query(`select feed_id, last_refreshed, last_modified, etag from http_states`)
     if err != nil {
         log.Print(err)
@@ -3,11 +3,11 @@ package storage
 import (
     "encoding/json"
     "fmt"
+    xhtml "golang.org/x/net/html"
     "html"
     "log"
     "strings"
     "time"
-    xhtml "golang.org/x/net/html"
 )
 
 type ItemStatus int
@@ -6,7 +6,7 @@ import (
     "log"
 )
 
-var migrations = []func(*sql.Tx)error{
+var migrations = []func(*sql.Tx) error{
     m01_initial,
     m02_feed_states_and_errors,
     m03_on_delete_actions,
@@ -17,7 +17,7 @@ var maxVersion = int64(len(migrations))
 
 func migrate(db *sql.DB) error {
     var version int64
-    db.QueryRow("pragma user_version").Scan(&version);
+    db.QueryRow("pragma user_version").Scan(&version)
 
     if version >= maxVersion {
         return nil
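The change above drops an explicit trailing semicolon: Go's lexer inserts a semicolon automatically at the end of such a line, so writing one is redundant and gofmt removes it. A minimal sketch (hypothetical, without the database/sql call):

// semicolon_demo.go — hypothetical; the same kind of statement with and without ";".
package main

import "fmt"

func main() {
    var version int64
    fmt.Sscan("3", &version) // before gofmt this line ended with ";"
    fmt.Println(version)
}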
@@ -29,7 +29,7 @@ func migrate(db *sql.DB) error {
     // Migrations altering schema using a sequence of steps due to SQLite limitations.
     // Must come with `pragma foreign_key_check` at the end. See:
     // "Making Other Kinds Of Table Schema Changes"
     // https://www.sqlite.org/lang_altertable.html
     trickyAlteration := (v == 3)
 
     log.Printf("[migration:%d] starting", v)
@@ -56,7 +56,7 @@ func migrate(db *sql.DB) error {
 func migrateVersion(v int64, db *sql.DB) error {
     var err error
     var tx *sql.Tx
-    migratefunc := migrations[v - 1]
+    migratefunc := migrations[v-1]
     if tx, err = db.Begin(); err != nil {
         log.Printf("[migration:%d] failed to start transaction", v)
         return err
@@ -6,7 +6,7 @@ import (
 )
 
 type Storage struct {
     db *sql.DB
 }
 
 func New(path string) (*Storage, error) {
@@ -5,8 +5,8 @@ import (
     "errors"
     "fmt"
     "github.com/mmcdole/gofeed"
-    "github.com/nkanaev/yarr/src/storage"
     "github.com/nkanaev/yarr/src/crawler"
+    "github.com/nkanaev/yarr/src/storage"
     "io/ioutil"
     "net"
     "net/http"
@@ -3,8 +3,8 @@ package worker
 import (
     "github.com/nkanaev/yarr/src/storage"
     "log"
-    "sync/atomic"
     "runtime"
+    "sync/atomic"
     "time"
 )
 
@@ -19,7 +19,7 @@ type Worker struct {
 func NewWorker(db *storage.Storage) *Worker {
     queueSize := int32(0)
     return &Worker{
         db: db,
         feedQueue: make(chan storage.Feed, 3000),
         queueSize: &queueSize,
         refreshRate: make(chan int64),