move packages to src

This commit is contained in:
Nazar Kanaev
2021-02-26 13:39:30 +00:00
parent d825ce9bdf
commit 3fac9bb1bd
71 changed files with 0 additions and 0 deletions

60
src/server/auth.go Normal file
View File

@@ -0,0 +1,60 @@
package server
import (
"crypto/hmac"
"crypto/sha256"
"crypto/subtle"
"encoding/hex"
"net/http"
"strings"
"time"
)
// userIsAuthenticated reports whether the request carries a valid "auth"
// cookie for the given credentials. The cookie value has the form
// "<username>:<token>", where the token half must equal
// secret(username, password). Comparisons are constant-time.
func userIsAuthenticated(req *http.Request, username, password string) bool {
	cookie, _ := req.Cookie("auth")
	if cookie == nil {
		return false
	}
	parts := strings.Split(cookie.Value, ":")
	if len(parts) != 2 {
		return false
	}
	name, token := parts[0], parts[1]
	if !stringsEqual(name, username) {
		return false
	}
	return stringsEqual(token, secret(username, password))
}
// userAuthenticate issues the "auth" session cookie for the given
// credentials. The cookie value is "<username>:<token>" (see secret) and
// expires in one week.
func userAuthenticate(rw http.ResponseWriter, username, password string) {
	// Scope the cookie to the configured base path, falling back to root.
	path := BasePath
	if path == "" {
		path = "/"
	}
	http.SetCookie(rw, &http.Cookie{
		Name:    "auth",
		Value:   username + ":" + secret(username, password),
		Expires: time.Now().Add(time.Hour * 24 * 7), // 1 week
		Path:    path,
	})
}
func userLogout(rw http.ResponseWriter) {
cookie := http.Cookie{
Name: "auth",
Value: "",
MaxAge: -1,
}
http.SetCookie(rw, &cookie)
}
// stringsEqual compares two strings in constant time, guarding the
// credential checks against timing attacks. Strings of different lengths
// compare unequal.
func stringsEqual(p1, p2 string) bool {
	a, b := []byte(p1), []byte(p2)
	return subtle.ConstantTimeCompare(a, b) == 1
}
// secret derives the cookie authentication token: the hex-encoded
// HMAC-SHA256 of msg keyed with key.
func secret(msg, key string) string {
	mac := hmac.New(sha256.New, []byte(key))
	mac.Write([]byte(msg))
	return hex.EncodeToString(mac.Sum(nil))
}

313
src/server/crawler.go Normal file
View File

@@ -0,0 +1,313 @@
package server
import (
"bytes"
"errors"
"fmt"
"github.com/PuerkitoBio/goquery"
"github.com/mmcdole/gofeed"
"github.com/nkanaev/yarr/storage"
"io/ioutil"
"net"
"net/http"
"net/url"
"time"
)
// FeedSource is a feed link discovered on an HTML page, as returned by
// searchFeedLinks and offered to the user when a page exposes several feeds.
type FeedSource struct {
	Title string `json:"title"`
	Url   string `json:"url"`
}
// feedLinks is a goquery selector matching elements that typically point at
// an RSS/Atom feed: <link> alternates plus common anchor href/text patterns.
const feedLinks = `
link[type='application/rss+xml'],
link[type='application/atom+xml'],
a[href$="/feed"],
a[href$="/feed/"],
a[href$="feed.xml"],
a[href$="atom.xml"],
a[href$="rss.xml"],
a:contains("rss"),
a:contains("RSS"),
a:contains("feed"),
a:contains("FEED")
`
// Client wraps an *http.Client, stamping every request with a fixed
// User-Agent header.
type Client struct {
	httpClient *http.Client
	userAgent  string
}
// get performs a plain GET request for url with the client's User-Agent set.
func (c *Client) get(url string) (*http.Response, error) {
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", c.userAgent)
	return c.httpClient.Do(req)
}
// getConditional performs a conditional GET for url, replaying the cache
// validators from a previous response (If-Modified-Since / If-None-Match)
// so the server can answer 304 Not Modified.
func (c *Client) getConditional(url, lastModified, etag string) (*http.Response, error) {
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}
	headers := req.Header
	headers.Set("User-Agent", c.userAgent)
	headers.Set("If-Modified-Since", lastModified)
	headers.Set("If-None-Match", etag)
	return c.httpClient.Do(req)
}
// defaultClient is the shared HTTP client used for all feed and favicon
// fetching; it is initialized in init() below with timeouts and a custom
// User-Agent.
var defaultClient *Client
// searchFeedLinks scans an HTML document for likely feed links (see the
// feedLinks selector) and returns them resolved against siteurl.
// Collection stops after roughly 10 distinct links.
func searchFeedLinks(html []byte, siteurl string) ([]FeedSource, error) {
	sources := make([]FeedSource, 0)
	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(html))
	if err != nil {
		return sources, err
	}
	base, err := url.Parse(siteurl)
	if err != nil {
		return sources, err
	}
	// Discovered feeds keyed by absolute URL; the value keeps the first
	// non-empty title seen for that URL.
	feeds := make(map[string]string)
	doc.Find(feedLinks).Each(func(i int, s *goquery.Selection) {
		// Unlikely to happen, but don't get more than N links
		if len(feeds) > 10 {
			return
		}
		href, ok := s.Attr("href")
		if !ok {
			return
		}
		ref, err := url.Parse(href)
		if err != nil {
			return
		}
		abs := base.ResolveReference(ref).String()
		if title, seen := feeds[abs]; !seen || title == "" {
			feeds[abs] = s.AttrOr("title", "")
		}
	})
	for link, title := range feeds {
		sources = append(sources, FeedSource{Title: title, Url: link})
	}
	return sources, nil
}
// discoverFeed fetches candidateUrl and tries to interpret the body as a
// feed. Outcomes (matching the (feed, sources, err) return triple):
//   - the body parses as a feed: (feed, nil, nil)
//   - the body is HTML with exactly one feed link: recurse into that link
//   - the body is HTML with several feed links: (nil, &sources, nil)
//   - anything else: (nil, nil, err)
func discoverFeed(candidateUrl string) (*gofeed.Feed, *[]FeedSource, error) {
	// Query URL
	res, err := defaultClient.get(candidateUrl)
	if err != nil {
		return nil, nil, err
	}
	defer res.Body.Close()
	if res.StatusCode != 200 {
		errmsg := fmt.Sprintf("Failed to fetch feed %s (status: %d)", candidateUrl, res.StatusCode)
		return nil, nil, errors.New(errmsg)
	}
	content, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, nil, err
	}
	// Try to feed into parser
	feedparser := gofeed.NewParser()
	feed, err := feedparser.Parse(bytes.NewReader(content))
	if err == nil {
		// WILD: feeds may not always have link to themselves
		if len(feed.FeedLink) == 0 {
			feed.FeedLink = candidateUrl
		}
		// WILD: resolve relative links (path, without host)
		base, _ := url.Parse(candidateUrl)
		if link, err := url.Parse(feed.Link); err == nil && link.Host == "" {
			feed.Link = base.ResolveReference(link).String()
		}
		if link, err := url.Parse(feed.FeedLink); err == nil && link.Host == "" {
			feed.FeedLink = base.ResolveReference(link).String()
		}
		return feed, nil, nil
	}
	// Possibly an html link. Search for feed links
	sources, err := searchFeedLinks(content, candidateUrl)
	if err != nil {
		return nil, nil, err
	} else if len(sources) == 0 {
		return nil, nil, errors.New("No feeds found at the given url")
	} else if len(sources) == 1 {
		// Guard against a page that advertises itself as its only feed,
		// which would otherwise recurse forever.
		if sources[0].Url == candidateUrl {
			return nil, nil, errors.New("Recursion!")
		}
		return discoverFeed(sources[0].Url)
	}
	return nil, &sources, nil
}
// findFavicon tries to locate a favicon for a feed. It collects candidate
// icon URLs from the website's <link rel=icon> tags plus the conventional
// /favicon.ico of both the website and the feed URL, downloads them in
// order, and returns the first payload that sniffs as a supported image.
//
// A nil result with a nil error means no icon was found.
func findFavicon(websiteUrl, feedUrl string) (*[]byte, error) {
	candidateUrls := make([]string, 0)

	// favicon returns the conventional /favicon.ico URL for link, or "".
	favicon := func(link string) string {
		u, err := url.Parse(link)
		if err != nil {
			return ""
		}
		return fmt.Sprintf("%s://%s/favicon.ico", u.Scheme, u.Host)
	}

	if len(websiteUrl) != 0 {
		base, err := url.Parse(websiteUrl)
		if err != nil {
			return nil, err
		}
		res, err := defaultClient.get(websiteUrl)
		if err != nil {
			return nil, err
		}
		defer res.Body.Close()
		doc, err := goquery.NewDocumentFromReader(res.Body)
		if err != nil {
			return nil, err
		}
		doc.Find(`link[rel=icon]`).EachWithBreak(func(i int, s *goquery.Selection) bool {
			if href, ok := s.Attr("href"); ok {
				if hrefUrl, err := url.Parse(href); err == nil {
					faviconUrl := base.ResolveReference(hrefUrl).String()
					candidateUrls = append(candidateUrls, faviconUrl)
				}
			}
			return true
		})
		if c := favicon(websiteUrl); len(c) != 0 {
			candidateUrls = append(candidateUrls, c)
		}
	}
	if c := favicon(feedUrl); len(c) != 0 {
		candidateUrls = append(candidateUrls, c)
	}

	imageTypes := [4]string{
		"image/x-icon",
		"image/png",
		"image/jpeg",
		"image/gif",
	}
	// fetchImage downloads one candidate and returns its content when the
	// payload sniffs as a supported image type. It is a separate function
	// so each response body is closed as soon as its iteration finishes;
	// the original deferred Close inside the loop, keeping every body open
	// (and its connection tied up) until the whole function returned.
	fetchImage := func(link string) *[]byte {
		res, err := defaultClient.get(link)
		if err != nil {
			return nil
		}
		defer res.Body.Close()
		if res.StatusCode != 200 {
			return nil
		}
		content, err := ioutil.ReadAll(res.Body)
		if err != nil {
			return nil
		}
		ctype := http.DetectContentType(content)
		for _, itype := range imageTypes {
			if ctype == itype {
				return &content
			}
		}
		return nil
	}
	for _, link := range candidateUrls {
		if content := fetchImage(link); content != nil {
			return content, nil
		}
	}
	return nil, nil
}
// convertItems maps parsed gofeed entries onto storage.Item values that
// belong to the given feed; every item starts out with status UNREAD.
func convertItems(items []*gofeed.Item, feed storage.Feed) []storage.Item {
	result := make([]storage.Item, len(items))
	for i, item := range items {
		var author, imageURL string
		if item.Image != nil {
			imageURL = item.Image.URL
		}
		if item.Author != nil {
			author = item.Author.Name
		}
		result[i] = storage.Item{
			GUID:        item.GUID,
			FeedId:      feed.Id,
			Title:       item.Title,
			Link:        item.Link,
			Description: item.Description,
			Content:     item.Content,
			Author:      author,
			Date:        item.PublishedParsed,
			DateUpdated: item.UpdatedParsed,
			Status:      storage.UNREAD,
			Image:       imageURL,
		}
	}
	return result
}
// listItems fetches the feed's current entries, honoring HTTP caching.
// Previously stored Last-Modified/Etag validators are replayed as a
// conditional request; a 304 response yields (nil, nil), meaning "nothing
// new". Fresh validators from the response are persisted for the next poll.
func listItems(f storage.Feed, db *storage.Storage) ([]storage.Item, error) {
	var res *http.Response
	var err error
	httpState := db.GetHTTPState(f.Id)
	if httpState != nil {
		res, err = defaultClient.getConditional(f.FeedLink, httpState.LastModified, httpState.Etag)
	} else {
		res, err = defaultClient.get(f.FeedLink)
	}
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	// Any 4xx/5xx counts as a failed poll.
	if res.StatusCode/100 == 4 || res.StatusCode/100 == 5 {
		errmsg := fmt.Sprintf("Failed to list feed items for %s (status: %d)", f.FeedLink, res.StatusCode)
		return nil, errors.New(errmsg)
	}
	// 304 Not Modified: the stored items are still current.
	if res.StatusCode == 304 {
		return nil, nil
	}
	lastModified := res.Header.Get("Last-Modified")
	etag := res.Header.Get("Etag")
	if lastModified != "" || etag != "" {
		db.SetHTTPState(f.Id, lastModified, etag)
	}
	feedparser := gofeed.NewParser()
	feed, err := feedparser.Parse(res.Body)
	if err != nil {
		return nil, err
	}
	return convertItems(feed.Items, f), nil
}
// init wires up the package-wide HTTP client used for all feed fetching:
// 10s dial and TLS handshake timeouts, a 30s overall request timeout,
// keep-alives disabled, and proxy settings taken from the environment.
func init() {
	dialer := &net.Dialer{Timeout: 10 * time.Second}
	transport := &http.Transport{
		Proxy:               http.ProxyFromEnvironment,
		DialContext:         dialer.DialContext,
		DisableKeepAlives:   true,
		TLSHandshakeTimeout: 10 * time.Second,
	}
	defaultClient = &Client{
		httpClient: &http.Client{
			Timeout:   30 * time.Second,
			Transport: transport,
		},
		userAgent: "Yarr/1.0",
	}
}

477
src/server/handlers.go Normal file
View File

@@ -0,0 +1,477 @@
package server
import (
"encoding/json"
"fmt"
"github.com/nkanaev/yarr/storage"
"github.com/nkanaev/yarr/assets"
"html"
"io/ioutil"
"math"
"net/http"
"reflect"
"strconv"
"strings"
)
// TODO: gzip?
// StaticHandler serves the embedded asset files under the /static/ prefix.
var StaticHandler = http.StripPrefix("/static/", http.FileServer(http.FS(assets.FS))).ServeHTTP
// routes is the application's route table (see router.go for the pattern
// syntax: ":name" matches one path segment, "*name" matches greedily).
// Routes marked ManualAuth handle authentication themselves instead of
// relying on the blanket check in ServeHTTP.
var routes []Route = []Route{
	p("/", IndexHandler).ManualAuth(),
	p("/static/*path", StaticHandler).ManualAuth(),

	p("/api/status", StatusHandler),
	p("/api/folders", FolderListHandler),
	p("/api/folders/:id", FolderHandler),
	p("/api/feeds", FeedListHandler),
	p("/api/feeds/find", FeedHandler),
	p("/api/feeds/refresh", FeedRefreshHandler),
	p("/api/feeds/errors", FeedErrorsHandler),
	p("/api/feeds/:id/icon", FeedIconHandler),
	p("/api/feeds/:id", FeedHandler),
	p("/api/items", ItemListHandler),
	p("/api/items/:id", ItemHandler),
	p("/api/settings", SettingsHandler),
	p("/opml/import", OPMLImportHandler),
	p("/opml/export", OPMLExportHandler),
	p("/page", PageCrawlHandler),
	p("/logout", LogoutHandler),
}
// FolderCreateForm is the POST body for creating a folder.
type FolderCreateForm struct {
	Title string `json:"title"`
}

// FolderUpdateForm is the PUT body for updating a folder; nil fields are
// left unchanged.
type FolderUpdateForm struct {
	Title      *string `json:"title,omitempty"`
	IsExpanded *bool   `json:"is_expanded,omitempty"`
}

// FeedCreateForm is the POST body for subscribing to a new feed;
// FolderID is optional.
type FeedCreateForm struct {
	Url      string `json:"url"`
	FolderID *int64 `json:"folder_id,omitempty"`
}

// ItemUpdateForm is the PUT body for updating an item; only the status
// can be changed.
type ItemUpdateForm struct {
	Status *storage.ItemStatus `json:"status,omitempty"`
}
// IndexHandler serves the main page, or the login page when authentication
// is enabled and the request carries no valid session. A POST with matching
// credentials establishes the session cookie and redirects back.
func IndexHandler(rw http.ResponseWriter, req *http.Request) {
	h := handler(req)
	authenticated := !h.requiresAuth() || userIsAuthenticated(req, h.Username, h.Password)
	if !authenticated {
		if req.Method == "POST" {
			username := req.FormValue("username")
			password := req.FormValue("password")
			if stringsEqual(username, h.Username) && stringsEqual(password, h.Password) {
				userAuthenticate(rw, username, password)
				http.Redirect(rw, req, req.URL.Path, http.StatusFound)
				return
			}
		}
		rw.Header().Set("Content-Type", "text/html")
		assets.Render("login.html", rw, nil)
		return
	}
	rw.Header().Set("Content-Type", "text/html")
	assets.Render("index.html", rw, nil)
}
/*
func StaticHandler(rw http.ResponseWriter, req *http.Request) {
http.StripPrefix("/static/", http.FileServer(http.FS(assets.FS))).ServeHTTP(rw, req)
ctype := mime.TypeByExtension(filepath.Ext(path))
if assets != nil {
if asset, ok := assets[path]; ok {
if req.Header.Get("if-none-match") == asset.etag {
rw.WriteHeader(http.StatusNotModified)
return
}
rw.Header().Set("Content-Type", ctype)
rw.Header().Set("Content-Encoding", "gzip")
rw.Header().Set("Etag", asset.etag)
rw.Write(*asset.gzip())
}
}
f, err := os.Open("assets/" + path)
if err != nil {
return
}
defer f.Close()
rw.Header().Set("Content-Type", ctype)
io.Copy(rw, f)
}
*/
func StatusHandler(rw http.ResponseWriter, req *http.Request) {
writeJSON(rw, map[string]interface{}{
"running": *handler(req).queueSize,
"stats": db(req).FeedStats(),
})
}
func FolderListHandler(rw http.ResponseWriter, req *http.Request) {
if req.Method == "GET" {
list := db(req).ListFolders()
writeJSON(rw, list)
} else if req.Method == "POST" {
var body FolderCreateForm
if err := json.NewDecoder(req.Body).Decode(&body); err != nil {
handler(req).log.Print(err)
rw.WriteHeader(http.StatusBadRequest)
return
}
if len(body.Title) == 0 {
rw.WriteHeader(http.StatusBadRequest)
writeJSON(rw, map[string]string{"error": "Folder title missing."})
return
}
folder := db(req).CreateFolder(body.Title)
rw.WriteHeader(http.StatusCreated)
writeJSON(rw, folder)
} else {
rw.WriteHeader(http.StatusMethodNotAllowed)
}
}
// FolderHandler updates (PUT) or deletes (DELETE) the folder named by the
// :id path variable. Unsupported methods answer 405, consistent with the
// other handlers in this file (previously they fell through with an
// implicit 200 and empty body).
func FolderHandler(rw http.ResponseWriter, req *http.Request) {
	id, err := strconv.ParseInt(Vars(req)["id"], 10, 64)
	if err != nil {
		rw.WriteHeader(http.StatusBadRequest)
		return
	}
	if req.Method == "PUT" {
		var body FolderUpdateForm
		if err := json.NewDecoder(req.Body).Decode(&body); err != nil {
			handler(req).log.Print(err)
			rw.WriteHeader(http.StatusBadRequest)
			return
		}
		// Both fields are optional; only apply what the client sent.
		if body.Title != nil {
			db(req).RenameFolder(id, *body.Title)
		}
		if body.IsExpanded != nil {
			db(req).ToggleFolderExpanded(id, *body.IsExpanded)
		}
		rw.WriteHeader(http.StatusOK)
	} else if req.Method == "DELETE" {
		db(req).DeleteFolder(id)
		rw.WriteHeader(http.StatusNoContent)
	} else {
		rw.WriteHeader(http.StatusMethodNotAllowed)
	}
}
// FeedRefreshHandler queues every feed for an immediate refresh (POST only).
func FeedRefreshHandler(rw http.ResponseWriter, req *http.Request) {
	if req.Method != "POST" {
		rw.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	handler(req).fetchAllFeeds()
	rw.WriteHeader(http.StatusOK)
}
// FeedErrorsHandler lists the stored fetch errors per feed.
func FeedErrorsHandler(rw http.ResponseWriter, req *http.Request) {
	// Named feedErrors so the local does not shadow the errors package.
	feedErrors := db(req).GetFeedErrors()
	writeJSON(rw, feedErrors)
}
// FeedIconHandler serves the stored favicon of the feed named by :id,
// sniffing the Content-Type from the image bytes; answers 404 when the
// feed is unknown or has no icon.
func FeedIconHandler(rw http.ResponseWriter, req *http.Request) {
	id, err := strconv.ParseInt(Vars(req)["id"], 10, 64)
	if err != nil {
		rw.WriteHeader(http.StatusBadRequest)
		return
	}
	feed := db(req).GetFeed(id)
	if feed == nil || feed.Icon == nil {
		rw.WriteHeader(http.StatusNotFound)
		return
	}
	icon := *feed.Icon
	rw.Header().Set("Content-Type", http.DetectContentType(icon))
	rw.Header().Set("Content-Length", strconv.Itoa(len(icon)))
	rw.Write(icon)
}
func FeedListHandler(rw http.ResponseWriter, req *http.Request) {
if req.Method == "GET" {
list := db(req).ListFeeds()
writeJSON(rw, list)
} else if req.Method == "POST" {
var form FeedCreateForm
if err := json.NewDecoder(req.Body).Decode(&form); err != nil {
handler(req).log.Print(err)
rw.WriteHeader(http.StatusBadRequest)
return
}
feed, sources, err := discoverFeed(form.Url)
if err != nil {
handler(req).log.Print(err)
writeJSON(rw, map[string]string{"status": "notfound"})
return
}
if feed != nil {
storedFeed := db(req).CreateFeed(
feed.Title,
feed.Description,
feed.Link,
feed.FeedLink,
form.FolderID,
)
db(req).CreateItems(convertItems(feed.Items, *storedFeed))
icon, err := findFavicon(storedFeed.Link, storedFeed.FeedLink)
if icon != nil {
db(req).UpdateFeedIcon(storedFeed.Id, icon)
}
if err != nil {
handler(req).log.Printf("Failed to find favicon for %s (%d): %s", storedFeed.FeedLink, storedFeed.Id, err)
}
writeJSON(rw, map[string]string{"status": "success"})
} else if sources != nil {
writeJSON(rw, map[string]interface{}{"status": "multiple", "choice": sources})
} else {
writeJSON(rw, map[string]string{"status": "notfound"})
}
}
}
// FeedHandler updates (PUT) or deletes (DELETE) the feed named by :id.
//
// The PUT body is a free-form JSON object; recognized keys are "title"
// (string) and "folder_id" (number, or null to detach the feed from any
// folder). Unknown keys and wrongly-typed values are ignored.
func FeedHandler(rw http.ResponseWriter, req *http.Request) {
	id, err := strconv.ParseInt(Vars(req)["id"], 10, 64)
	if err != nil {
		rw.WriteHeader(http.StatusBadRequest)
		return
	}
	if req.Method == "PUT" {
		feed := db(req).GetFeed(id)
		if feed == nil {
			rw.WriteHeader(http.StatusBadRequest)
			return
		}
		body := make(map[string]interface{})
		if err := json.NewDecoder(req.Body).Decode(&body); err != nil {
			handler(req).log.Print(err)
			rw.WriteHeader(http.StatusBadRequest)
			return
		}
		if title, ok := body["title"]; ok {
			// Guard against `"title": null`: reflect.TypeOf(nil) returns a
			// nil Type, and calling Kind() on it panics, crashing the request.
			if title != nil && reflect.TypeOf(title).Kind() == reflect.String {
				db(req).RenameFeed(id, title.(string))
			}
		}
		if f_id, ok := body["folder_id"]; ok {
			if f_id == nil {
				db(req).UpdateFeedFolder(id, nil)
			} else if reflect.TypeOf(f_id).Kind() == reflect.Float64 {
				// encoding/json decodes every JSON number into float64.
				folderId := int64(f_id.(float64))
				db(req).UpdateFeedFolder(id, &folderId)
			}
		}
		rw.WriteHeader(http.StatusOK)
	} else if req.Method == "DELETE" {
		db(req).DeleteFeed(id)
		rw.WriteHeader(http.StatusNoContent)
	} else {
		rw.WriteHeader(http.StatusMethodNotAllowed)
	}
}
// ItemHandler updates the item named by the :id path variable (PUT only);
// the request body may carry a new status for the item.
func ItemHandler(rw http.ResponseWriter, req *http.Request) {
	if req.Method != "PUT" {
		rw.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	id, err := strconv.ParseInt(Vars(req)["id"], 10, 64)
	if err != nil {
		rw.WriteHeader(http.StatusBadRequest)
		return
	}
	var body ItemUpdateForm
	if err := json.NewDecoder(req.Body).Decode(&body); err != nil {
		handler(req).log.Print(err)
		rw.WriteHeader(http.StatusBadRequest)
		return
	}
	if body.Status != nil {
		db(req).UpdateItemStatus(id, *body.Status)
	}
	rw.WriteHeader(http.StatusOK)
}
// ItemListHandler lists items (GET) with paging (20 per page) and optional
// filtering by folder, feed, status and search string, or marks the
// filtered set of items read (PUT).
func ItemListHandler(rw http.ResponseWriter, req *http.Request) {
	if req.Method == "GET" {
		perPage := 20
		curPage := 1
		query := req.URL.Query()
		if page, err := strconv.ParseInt(query.Get("page"), 10, 64); err == nil {
			curPage = int(page)
		}
		// Absent or unparseable query params simply leave the corresponding
		// filter field unset.
		filter := storage.ItemFilter{}
		if folderID, err := strconv.ParseInt(query.Get("folder_id"), 10, 64); err == nil {
			filter.FolderID = &folderID
		}
		if feedID, err := strconv.ParseInt(query.Get("feed_id"), 10, 64); err == nil {
			filter.FeedID = &feedID
		}
		if status := query.Get("status"); len(status) != 0 {
			statusValue := storage.StatusValues[status]
			filter.Status = &statusValue
		}
		if search := query.Get("search"); len(search) != 0 {
			filter.Search = &search
		}
		// Newest first unless the client explicitly asks for oldest first.
		newestFirst := query.Get("oldest_first") != "true"
		items := db(req).ListItems(filter, (curPage-1)*perPage, perPage, newestFirst)
		count := db(req).CountItems(filter)
		writeJSON(rw, map[string]interface{}{
			"page": map[string]int{
				"cur": curPage,
				"num": int(math.Ceil(float64(count) / float64(perPage))),
			},
			"list": items,
		})
	} else if req.Method == "PUT" {
		query := req.URL.Query()
		filter := storage.MarkFilter{}
		if folderID, err := strconv.ParseInt(query.Get("folder_id"), 10, 64); err == nil {
			filter.FolderID = &folderID
		}
		if feedID, err := strconv.ParseInt(query.Get("feed_id"), 10, 64); err == nil {
			filter.FeedID = &feedID
		}
		db(req).MarkItemsRead(filter)
		rw.WriteHeader(http.StatusOK)
	} else {
		rw.WriteHeader(http.StatusMethodNotAllowed)
	}
}
// SettingsHandler returns the stored settings (GET) or merges updates into
// them (PUT). When refresh_rate changes, the background refresher is
// notified with the new interval.
func SettingsHandler(rw http.ResponseWriter, req *http.Request) {
	switch req.Method {
	case "GET":
		writeJSON(rw, db(req).GetSettings())
	case "PUT":
		settings := make(map[string]interface{})
		if err := json.NewDecoder(req.Body).Decode(&settings); err != nil {
			rw.WriteHeader(http.StatusBadRequest)
			return
		}
		if !db(req).UpdateSettings(settings) {
			rw.WriteHeader(http.StatusBadRequest)
			return
		}
		if _, ok := settings["refresh_rate"]; ok {
			handler(req).refreshRate <- db(req).GetSettingsValueInt64("refresh_rate")
		}
		rw.WriteHeader(http.StatusOK)
	}
}
// OPMLImportHandler imports subscriptions from an uploaded OPML document
// (multipart form field "opml", POST only). Top-level rss outlines become
// feeds; any other outline becomes a folder holding every feed nested
// under it. All feeds are queued for an immediate refresh afterwards.
func OPMLImportHandler(rw http.ResponseWriter, req *http.Request) {
	if req.Method == "POST" {
		file, _, err := req.FormFile("opml")
		if err != nil {
			handler(req).log.Print(err)
			return
		}
		// The multipart file may be backed by a temporary file on disk;
		// close it so the descriptor is released (previously leaked).
		defer file.Close()
		doc, err := parseOPML(file)
		if err != nil {
			handler(req).log.Print(err)
			return
		}
		for _, outline := range doc.Outlines {
			if outline.Type == "rss" {
				db(req).CreateFeed(outline.Title, outline.Description, outline.SiteURL, outline.FeedURL, nil)
			} else {
				folder := db(req).CreateFolder(outline.Title)
				for _, o := range outline.AllFeeds() {
					db(req).CreateFeed(o.Title, o.Description, o.SiteURL, o.FeedURL, &folder.Id)
				}
			}
		}
		handler(req).fetchAllFeeds()
		rw.WriteHeader(http.StatusOK)
	} else {
		rw.WriteHeader(http.StatusMethodNotAllowed)
	}
}
// OPMLExportHandler renders all subscriptions as an OPML 1.1 document
// offered as a file download (GET only). Feeds are grouped under their
// folder's <outline>; folderless feeds go directly into <body>.
func OPMLExportHandler(rw http.ResponseWriter, req *http.Request) {
	if req.Method == "GET" {
		rw.Header().Set("Content-Type", "application/xml; charset=utf-8")
		rw.Header().Set("Content-Disposition", `attachment; filename="subscriptions.opml"`)

		builder := strings.Builder{}

		// line writes one output line, XML-escaping interpolated arguments.
		line := func(s string, args ...string) {
			if len(args) > 0 {
				escapedargs := make([]interface{}, len(args))
				for idx, arg := range args {
					escapedargs[idx] = html.EscapeString(arg)
				}
				s = fmt.Sprintf(s, escapedargs...)
			}
			builder.WriteString(s)
			builder.WriteString("\n")
		}
		// feedline writes a feed <outline/> at the given indentation depth.
		feedline := func(feed storage.Feed, indent int) {
			line(
				strings.Repeat(" ", indent)+
					`<outline type="rss" text="%s" description="%s" xmlUrl="%s" htmlUrl="%s"/>`,
				feed.Title, feed.Description,
				feed.FeedLink, feed.Link,
			)
		}
		line(`<?xml version="1.0" encoding="UTF-8"?>`)
		line(`<opml version="1.1">`)
		line(`<head>`)
		line(`  <title>subscriptions.opml</title>`)
		line(`</head>`)
		line(`<body>`)
		// Bucket feeds by folder; key 0 collects the folderless ones.
		feedsByFolderID := make(map[int64][]storage.Feed)
		for _, feed := range db(req).ListFeeds() {
			var folderId = int64(0)
			if feed.FolderId != nil {
				folderId = *feed.FolderId
			}
			if feedsByFolderID[folderId] == nil {
				feedsByFolderID[folderId] = make([]storage.Feed, 0)
			}
			feedsByFolderID[folderId] = append(feedsByFolderID[folderId], feed)
		}
		for _, folder := range db(req).ListFolders() {
			line(`  <outline text="%s">`, folder.Title)
			for _, feed := range feedsByFolderID[folder.Id] {
				feedline(feed, 4)
			}
			line(`  </outline>`)
		}
		for _, feed := range feedsByFolderID[0] {
			feedline(feed, 2)
		}
		line(`</body>`)
		line(`</opml>`)
		rw.Write([]byte(builder.String()))
	}
}
// PageCrawlHandler fetches the page at the "url" query parameter and echoes
// its raw body, so the UI can load full article content.
//
// NOTE(review): the URL is fetched server-side exactly as supplied, which
// is an SSRF vector if the server is reachable by untrusted users.
func PageCrawlHandler(rw http.ResponseWriter, req *http.Request) {
	query := req.URL.Query()
	url := query.Get("url")
	if len(url) == 0 {
		return
	}
	res, err := http.Get(url)
	if err != nil {
		return
	}
	// Always close the body; the original leaked the connection.
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err == nil {
		rw.Write(body)
	}
}
// LogoutHandler clears the session cookie and answers 204 No Content.
func LogoutHandler(rw http.ResponseWriter, req *http.Request) {
	userLogout(rw)
	rw.WriteHeader(http.StatusNoContent)
}

42
src/server/opml.go Normal file
View File

@@ -0,0 +1,42 @@
package server
import (
"encoding/xml"
"io"
)
// opml is the root element of an OPML subscription document.
type opml struct {
	XMLName  xml.Name  `xml:"opml"`
	Version  string    `xml:"version,attr"`
	Outlines []outline `xml:"body>outline"`
}

// outline is a single OPML outline node: either a feed (Type == "rss")
// or a grouping element containing nested outlines.
type outline struct {
	Type        string    `xml:"type,attr,omitempty"`
	Title       string    `xml:"text,attr"`
	FeedURL     string    `xml:"xmlUrl,attr,omitempty"`
	SiteURL     string    `xml:"htmlUrl,attr,omitempty"`
	Description string    `xml:"description,attr,omitempty"`
	Outlines    []outline `xml:"outline,omitempty"`
}

// AllFeeds flattens the subtree below o into the list of feed ("rss")
// outlines it contains, at any nesting depth.
func (o outline) AllFeeds() []outline {
	feeds := make([]outline, 0)
	for _, child := range o.Outlines {
		if child.Type == "rss" {
			feeds = append(feeds, child)
			continue
		}
		feeds = append(feeds, child.AllFeeds()...)
	}
	return feeds
}

// parseOPML decodes an OPML document from r. The decoder is deliberately
// lenient (non-strict mode, HTML entities accepted) to cope with the
// imperfect OPML files various feed readers produce.
func parseOPML(r io.Reader) (*opml, error) {
	doc := new(opml)
	decoder := xml.NewDecoder(r)
	decoder.Entity = xml.HTMLEntity
	decoder.Strict = false
	err := decoder.Decode(doc)
	return doc, err
}

17
src/server/response.go Normal file
View File

@@ -0,0 +1,17 @@
package server
import (
"encoding/json"
"log"
"net/http"
)
// writeJSON serializes data as JSON to rw with the appropriate content
// type, followed by a trailing newline.
//
// A serialization failure (unsupported value type, etc.) is a programming
// error, but it must not take the whole server down: answer 500 and log,
// instead of log.Fatal which called os.Exit and killed every connection.
func writeJSON(rw http.ResponseWriter, data interface{}) {
	reply, err := json.Marshal(data)
	if err != nil {
		log.Print(err)
		http.Error(rw, "internal server error", http.StatusInternalServerError)
		return
	}
	rw.Header().Set("Content-Type", "application/json; charset=utf-8")
	rw.Write(reply)
	rw.Write([]byte("\n"))
}

50
src/server/router.go Normal file
View File

@@ -0,0 +1,50 @@
package server
import (
"net/http"
"regexp"
)
// BasePath is the URL prefix the app is mounted under (e.g. "/yarr" behind
// a reverse proxy); empty when serving from the root.
var BasePath string = ""
// Route maps a URL pattern to its handler.
type Route struct {
	url        string
	urlRegex   *regexp.Regexp // compiled form of url; built in p()
	handler    func(http.ResponseWriter, *http.Request)
	manualAuth bool // when true, the handler does its own auth (see ServeHTTP)
}
// ManualAuth returns a copy of the route marked to bypass the global
// authentication check; its handler authenticates on its own.
func (r Route) ManualAuth() Route {
	marked := r
	marked.manualAuth = true
	return marked
}
// p builds a Route for path, translating ":name" segments into
// single-segment named captures and "*name" into greedy captures.
// E.g. "/api/feeds/:id" matches "/api/feeds/42" with id = "42".
func p(path string, handler http.HandlerFunc) Route {
	placeholder := regexp.MustCompile(`[\*\:]\w+`)
	pattern := placeholder.ReplaceAllStringFunc(path, func(m string) string {
		name := m[1:]
		if m[0] == '*' {
			return "(?P<" + name + ">.+)"
		}
		return "(?P<" + name + ">[^/]+)"
	})
	return Route{
		url:      path,
		urlRegex: regexp.MustCompile("^" + pattern + "$"),
		handler:  handler,
	}
}
// getRoute finds the first route in the table whose pattern matches
// reqPath and extracts the named captures (":id" etc.) into a vars map.
// Returns (nil, nil) when nothing matches.
func getRoute(reqPath string) (*Route, map[string]string) {
	vars := make(map[string]string)
	for _, route := range routes {
		if route.urlRegex.MatchString(reqPath) {
			matches := route.urlRegex.FindStringSubmatchIndex(reqPath)
			// SubexpNames()[0] is the unnamed whole match; named groups
			// start at 1, with byte offsets at matches[2i] and matches[2i+1].
			for i, key := range route.urlRegex.SubexpNames()[1:] {
				vars[key] = reqPath[matches[i*2+2]:matches[i*2+3]]
			}
			return &route, vars
		}
	}
	return nil, nil
}

226
src/server/server.go Normal file
View File

@@ -0,0 +1,226 @@
package server
import (
"context"
"log"
"net/http"
"runtime"
"strings"
"sync/atomic"
"time"
"github.com/nkanaev/yarr/storage"
)
// Handler is the application server: it owns the storage, the route
// dispatch (see ServeHTTP) and the background feed-refresh machinery.
type Handler struct {
	Addr        string
	db          *storage.Storage
	log         *log.Logger
	feedQueue   chan storage.Feed // feeds waiting to be fetched by the workers
	queueSize   *int32            // number of queued feeds; updated atomically
	refreshRate chan int64        // pushes a new refresh interval (minutes) to the scheduler
	// auth
	Username string
	Password string
	// https
	CertFile string
	KeyFile  string
}
// New creates a Handler serving the given storage on addr. Credentials
// (Username/Password) and TLS files may be assigned on the returned value
// before calling Start.
func New(db *storage.Storage, logger *log.Logger, addr string) *Handler {
	queueSize := int32(0)
	h := &Handler{
		db:          db,
		log:         logger,
		feedQueue:   make(chan storage.Feed, 3000),
		queueSize:   &queueSize,
		Addr:        addr,
		refreshRate: make(chan int64),
	}
	return h
}
// GetAddr returns the user-facing URL of the server, including scheme
// and base path.
func (h *Handler) GetAddr() string {
	scheme := "http"
	if h.CertFile != "" && h.KeyFile != "" {
		scheme = "https"
	}
	return scheme + "://" + h.Addr + BasePath
}
// Start launches the background jobs and serves HTTP (or HTTPS when both
// CertFile and KeyFile are set), blocking until the server stops.
func (h *Handler) Start() {
	h.startJobs()
	server := &http.Server{Addr: h.Addr, Handler: h}
	useTLS := h.CertFile != "" && h.KeyFile != ""
	var err error
	if useTLS {
		err = server.ListenAndServeTLS(h.CertFile, h.KeyFile)
	} else {
		err = server.ListenAndServe()
	}
	if err != http.ErrServerClosed {
		h.log.Fatal(err)
	}
}
// unsafeMethod reports whether the HTTP method can mutate state and thus
// needs the X-Requested-By CSRF check applied in ServeHTTP.
func unsafeMethod(method string) bool {
	switch method {
	case "POST", "PUT", "DELETE":
		return true
	}
	return false
}
// ServeHTTP is the single entry point for all requests: it strips the
// configured base path, resolves the route, enforces authentication plus a
// simple CSRF check (mutating requests must carry "X-Requested-By: yarr"),
// and finally invokes the route handler with the Handler and the path
// variables attached to the request context.
func (h Handler) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
	reqPath := req.URL.Path
	if BasePath != "" {
		if !strings.HasPrefix(reqPath, BasePath) {
			rw.WriteHeader(http.StatusNotFound)
			return
		}
		reqPath = strings.TrimPrefix(req.URL.Path, BasePath)
		// Redirect the bare base path to the canonical trailing-slash form.
		if reqPath == "" {
			http.Redirect(rw, req, BasePath+"/", http.StatusFound)
			return
		}
	}
	route, vars := getRoute(reqPath)
	if route == nil {
		rw.WriteHeader(http.StatusNotFound)
		return
	}
	// Routes marked ManualAuth (login page, static assets) check on their own.
	if h.requiresAuth() && !route.manualAuth {
		if unsafeMethod(req.Method) && req.Header.Get("X-Requested-By") != "yarr" {
			rw.WriteHeader(http.StatusUnauthorized)
			return
		}
		if !userIsAuthenticated(req, h.Username, h.Password) {
			rw.WriteHeader(http.StatusUnauthorized)
			return
		}
	}
	ctx := context.WithValue(req.Context(), ctxHandler, &h)
	ctx = context.WithValue(ctx, ctxVars, vars)
	route.handler(rw, req.WithContext(ctx))
}
// startJobs launches the background machinery:
//   - a pool of workers draining feedQueue (fetch items, store them,
//     fetch missing favicons),
//   - a daily ticker deleting old items,
//   - a debounced (2s) search-index sync after new items arrive,
//   - a scheduler goroutine converting refreshRate updates into a ticker
//     that re-queues all feeds periodically (0 disables auto-refresh).
func (h *Handler) startJobs() {
	delTicker := time.NewTicker(time.Hour * 24)

	syncSearchChannel := make(chan bool, 10)
	var syncSearchTimer *time.Timer // TODO: should this be atomic?
	// syncSearch schedules a search sync 2s in the future, extending the
	// deadline if one is already pending (a simple debounce).
	syncSearch := func() {
		if syncSearchTimer == nil {
			syncSearchTimer = time.AfterFunc(time.Second*2, func() {
				syncSearchChannel <- true
			})
		} else {
			syncSearchTimer.Reset(time.Second * 2)
		}
	}
	worker := func() {
		for {
			select {
			case feed := <-h.feedQueue:
				items, err := listItems(feed, h.db)
				// Decrement before error handling so the counter always
				// reflects the queue regardless of fetch outcome.
				atomic.AddInt32(h.queueSize, -1)
				if err != nil {
					h.log.Printf("Failed to fetch %s (%d): %s", feed.FeedLink, feed.Id, err)
					h.db.SetFeedError(feed.Id, err)
					continue
				}
				h.db.CreateItems(items)
				syncSearch()
				if !feed.HasIcon {
					icon, err := findFavicon(feed.Link, feed.FeedLink)
					if icon != nil {
						h.db.UpdateFeedIcon(feed.Id, icon)
					}
					if err != nil {
						h.log.Printf("Failed to search favicon for %s (%s): %s", feed.Link, feed.FeedLink, err)
					}
				}
			case <-delTicker.C:
				h.db.DeleteOldItems()
			case <-syncSearchChannel:
				h.db.SyncSearch()
			}
		}
	}
	// One worker per CPU, leaving one core free for the HTTP handlers.
	num := runtime.NumCPU() - 1
	if num < 1 {
		num = 1
	}
	for i := 0; i < num; i++ {
		go worker()
	}
	go h.db.DeleteOldItems()
	go h.db.SyncSearch()
	go func() {
		var refreshTicker *time.Ticker
		// A fresh channel nothing ever sends on blocks forever, so
		// auto-refresh stays off until a positive rate arrives below.
		refreshTick := make(<-chan time.Time)
		for {
			select {
			case <-refreshTick:
				h.fetchAllFeeds()
			case val := <-h.refreshRate:
				if refreshTicker != nil {
					refreshTicker.Stop()
					if val == 0 {
						refreshTick = make(<-chan time.Time)
					}
				}
				if val > 0 {
					refreshTicker = time.NewTicker(time.Duration(val) * time.Minute)
					refreshTick = refreshTicker.C
				}
			}
		}
	}()
	refreshRate := h.db.GetSettingsValueInt64("refresh_rate")
	h.refreshRate <- refreshRate
	if refreshRate > 0 {
		h.fetchAllFeeds()
	}
}
// requiresAuth reports whether authentication is enabled, i.e. both a
// username and a password were configured.
func (h Handler) requiresAuth() bool {
	return len(h.Username) > 0 && len(h.Password) > 0
}
// fetchAllFeeds clears previous fetch errors and queues every stored feed
// for refresh by the worker pool.
func (h *Handler) fetchAllFeeds() {
	h.log.Print("Refreshing all feeds")
	h.db.ResetFeedErrors()
	feeds := h.db.ListFeeds()
	for _, feed := range feeds {
		h.fetchFeed(feed)
	}
}
// fetchFeed enqueues a single feed for the workers, bumping the queue-size
// counter that StatusHandler reports.
func (h *Handler) fetchFeed(feed storage.Feed) {
	atomic.AddInt32(h.queueSize, 1)
	h.feedQueue <- feed
}
// Vars returns the URL path variables the router extracted for this
// request (e.g. "id"), or nil when the context carries none.
func Vars(req *http.Request) map[string]string {
	rv := req.Context().Value(ctxVars)
	if rv == nil {
		return nil
	}
	return rv.(map[string]string)
}
// db is a shortcut to the storage attached to the request's Handler.
func db(req *http.Request) *storage.Storage {
	h := handler(req)
	if h == nil {
		return nil
	}
	return h.db
}
// handler extracts the *Handler that ServeHTTP placed into the request
// context.
func handler(req *http.Request) *Handler {
	value := req.Context().Value(ctxHandler)
	return value.(*Handler)
}
// ctxKey is an unexported type for this package's context keys, so the
// values cannot collide with context keys set by other packages (using
// built-in types as context keys is flagged by go vet / staticcheck SA1029).
type ctxKey int

const (
	ctxVars    ctxKey = 2 // request path variables (map[string]string)
	ctxHandler ctxKey = 3 // the *Handler serving the request
)