rename packages

Nazar Kanaev 2021-03-23 15:39:48 +00:00
parent e0009e4267
commit c469749eaa
16 changed files with 23 additions and 23 deletions
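
In short: the feed package is renamed to parser, and the crawler package to scraper, so import paths move from github.com/nkanaev/yarr/src/feed and github.com/nkanaev/yarr/src/crawler to github.com/nkanaev/yarr/src/parser and github.com/nkanaev/yarr/src/scraper. A minimal sketch of an import block after the rename, using only paths that appear in the diff below:

import (
	"github.com/nkanaev/yarr/src/parser"
	"github.com/nkanaev/yarr/src/scraper"
)

Call sites also drop the old feedparser alias, since the package identifier is now parser.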

@@ -1,5 +1,5 @@
 // Atom 1.0 parser
-package feed
+package parser
 
 import (
 	"encoding/xml"

@@ -1,4 +1,4 @@
-package feed
+package parser
 
 import (
 	"reflect"

@@ -1,4 +1,4 @@
-package feed
+package parser
 
 import (
 	"bytes"

@@ -1,4 +1,4 @@
-package feed
+package parser
 
 import (
 	"reflect"

@@ -1,5 +1,5 @@
 // JSON 1.0 parser
-package feed
+package parser
 
 import (
 	"encoding/json"

@@ -1,4 +1,4 @@
-package feed
+package parser
 
 import (
 	"reflect"

@@ -1,4 +1,4 @@
-package feed
+package parser
 
 import "time"

@@ -1,7 +1,7 @@
 // Parser for RSS versions:
 // - 0.90
 // - 1.0
-package feed
+package parser
 
 import (
 	"encoding/xml"

@@ -1,4 +1,4 @@
-package feed
+package parser
 
 import (
 	"reflect"

@@ -2,7 +2,7 @@
 // - 0.91 netscape
 // - 0.91 userland
 // - 2.0
-package feed
+package parser
 
 import (
 	"encoding/xml"

@@ -1,4 +1,4 @@
-package feed
+package parser
 
 import (
 	"reflect"

@@ -1,4 +1,4 @@
-package feed
+package parser
 
 import (
 	"encoding/xml"

@@ -1,4 +1,4 @@
-package crawler
+package scraper
 
 import (
 	"strings"

@@ -1,4 +1,4 @@
-package crawler
+package scraper
 
 import (
 	"reflect"

@@ -1,4 +1,4 @@
-package crawler
+package scraper
 
 import (
 	"net/url"

@@ -11,8 +11,8 @@ import (
 	"net/url"
 	"time"
 
-	"github.com/nkanaev/yarr/src/crawler"
-	feedparser "github.com/nkanaev/yarr/src/feed"
+	"github.com/nkanaev/yarr/src/scraper"
+	"github.com/nkanaev/yarr/src/parser"
 	"github.com/nkanaev/yarr/src/storage"
 	"golang.org/x/net/html/charset"
 )
@@ -55,13 +55,13 @@ var defaultClient *Client
 func searchFeedLinks(html []byte, siteurl string) ([]FeedSource, error) {
 	sources := make([]FeedSource, 0, 0)
-	for url, title := range crawler.FindFeeds(string(html), siteurl) {
+	for url, title := range scraper.FindFeeds(string(html), siteurl) {
 		sources = append(sources, FeedSource{Title: title, Url: url})
 	}
 	return sources, nil
 }
 
-func DiscoverFeed(candidateUrl string) (*feedparser.Feed, string, *[]FeedSource, error) {
+func DiscoverFeed(candidateUrl string) (*parser.Feed, string, *[]FeedSource, error) {
 	// Query URL
 	res, err := defaultClient.get(candidateUrl)
 	if err != nil {
@@ -78,7 +78,7 @@ func DiscoverFeed(candidateUrl string) (*feedparser.Feed, string, *[]FeedSource,
 	}
 
 	// Try to feed into parser
-	feed, err := feedparser.Parse(bytes.NewReader(content))
+	feed, err := parser.Parse(bytes.NewReader(content))
 	if err == nil {
 		/*
 			// WILD: feeds may not always have link to themselves
@@ -141,7 +141,7 @@ func FindFavicon(websiteUrl, feedUrl string) (*[]byte, error) {
 		if err != nil {
 			return nil, err
 		}
-		candidateUrls = append(candidateUrls, crawler.FindIcons(string(body), websiteUrl)...)
+		candidateUrls = append(candidateUrls, scraper.FindIcons(string(body), websiteUrl)...)
 		if c := favicon(websiteUrl); len(c) != 0 {
 			candidateUrls = append(candidateUrls, c)
 		}
@@ -176,7 +176,7 @@ func FindFavicon(websiteUrl, feedUrl string) (*[]byte, error) {
 	return nil, nil
 }
 
-func ConvertItems(items []feedparser.Item, feed storage.Feed) []storage.Item {
+func ConvertItems(items []parser.Item, feed storage.Feed) []storage.Item {
 	result := make([]storage.Item, len(items))
 	for i, item := range items {
 		item := item
@@ -237,7 +237,7 @@ func listItems(f storage.Feed, db *storage.Storage) ([]storage.Item, error) {
 	if err != nil {
 		return nil, fmt.Errorf("failed to init response body: %s", err)
 	}
-	feed, err := feedparser.Parse(body)
+	feed, err := parser.Parse(body)
 	if err != nil {
 		return nil, fmt.Errorf("failed to parse: %s", err)
 	}
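
For reference, a minimal sketch of how the renamed packages are called after this commit. It is based only on the call sites visible above: parser.Parse takes an io.Reader and returns (*parser.Feed, error), while scraper.FindFeeds is ranged over as url/title pairs (its exact return type is an assumption here, not shown in the diff).

package main

import (
	"bytes"
	"fmt"

	"github.com/nkanaev/yarr/src/parser"
	"github.com/nkanaev/yarr/src/scraper"
)

func main() {
	html := "<html>...</html>" // hypothetical page content

	// FindFeeds scans a page for feed links; the diff above ranges
	// over its result as url -> title.
	for url, title := range scraper.FindFeeds(html, "https://example.com") {
		fmt.Println(url, title)
	}

	// parser.Parse reads a feed from an io.Reader, matching the
	// "feed, err := parser.Parse(body)" call site above.
	feed, err := parser.Parse(bytes.NewReader([]byte("<rss/>"))) // hypothetical input
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	_ = feed
}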