Compare commits: v2.0...2e77c4f1f3 (187 commits)
Commits (SHA1):

2e77c4f1f3, d000c25022, 780a0532f3, 14780111b8, fbd64e0d1c, 8042ae5694, 7ece385c66, 2846ed2e84,
0f6d892c8a, 21bac2b5aa, 1e5f6583d6, 748a51fd20, 0e2d377aac, 14d1bd1fc3, 01cadf644c, dd79d9404d,
d45b6c0e79, 63a357330c, 773f8fbd63, de7e8eb7fc, 2cc9488ef9, 46d67267cd, e618d5a4e2, 9d5b8d99f7,
13c047fc21, 55751b3eb6, b961502a17, a895145586, 5aec3b4dab, d787060a24, c1a29418eb, 17847f999c,
3adcddc70c, c76ff26bd6, 50f8648f64, 5f82a9e339, 3278ba4eac, 9fc72f8b68, b7b707bd43, 7cf27e0fde,
66f2a973a3, 7ecbbff18a, 850ce195a0, 479aebd023, 9b178d1fb3, 3ab098db5c, 6d16e93008, 98934daee4,
259474cae9, 1e65a7951b, bed5640366, 57ea83cf4f, 219842d723, a96fc101f2, 81a77ce0a4, 9ed359f964,
bc18557820, 7d99edab8d, 32ca121520, 9f1a0534a3, d2678be96d, 95ebbb9d13, 0f6d4d639d, 795a5d2cb4,
dd5f760606, 58d6a46e36, a8d7b86cdc, aac3de7ca2, de24659bae, 632412c10e, 012b58bbe4, c092842ee4,
e4c1d01915, ce07ddea92, bd6322e533, 91da774286, e62906e63d, 56e5625adc, 1ecf4b0bb4, 57d9421c7f,
a73188944d, 97904cc0f3, f28f354992, 698f5d6d06, b935a1c511, 10e6bfa5a0, f030a4075b, c9dd977600,
c1bcc0c517, 2a5692d9a7, a8d160f9b1, 286cbff236, fff0870d3b, fe22460c07, 18f2789a5d, 7f161a5408,
cba3fbc48c, 5e46f1480e, ead253c55f, 6b8da92cb3, a91f64ce9d, e1a6ccf133, d2c034a850, 713930decc,
ee2a825cf0, 8e9da86f83, 9eb49fd3a7, 684bc25b83, 8ceab03cd7, 34dad4ac8f, b40d930f8a, d4b34e900e,
954b549029, fbd0b2310e, be7af0ccaf, 18221ef12d, 4c0726412b, d7253a60b8, 2de3ddff08, 830248b6ae,
f8db2ef7ad, 109caaa889, d0b83babd2, de3decbffd, c92229a698, 176852b662, 52cc8ecbbd, e3e9542f1e,
b78c8bf8bf, bff7476b58, 05f5785660, cb50aed89a, df655aca5e, 86853a87bf, e3109a4384, eee8002d69,
92f11f7513, 5428e6be3a, 1ad693f931, c2d88a7e3f, 3b29d737eb, fe178b8fc6, cca742a1c2, c7eddff118,
cf30ed249f, 26b87dee98, 77c7f938f1, f98de9a0a5, 6fa2b67024, 355e5feb62, a7dd707062, 4de46a7bc5,
2c6fce3322, 19ecfcd0bc, d575acfe80, d203d38de6, 9f01f63613, 982c4ebbbc, 0c5385cef3, 58f4e1f6c9,
6b7f69d5c0, 7aeb458ee5, 7cfd3b3238, 55262d38fe, a45e29feb7, 9f5fd3bb4d, 63f9d55903, 8f36ae013e,
851aa1a136, f38dcfba3b, 214c7aacfc, eb9bfc57e2, c072783c42, 9d701678e1, 37ed856d8b, 28f08ad42a,
da267a56ef, 16e4cad9ad, d13a04898e, ff39fbff70, 92c6aac49e, 4ca81f90e9, 75e828cb4c, 883214a740,
36e359c881, 87b53fb8ec, 2ae62855cc
.github/workflows/build-docker-image (vendored, new file, 47 lines)

```yaml
name: Build Yarr Docker image

on:
  workflow_dispatch:
  push:
    paths:
      - 'yarr-version.txt'

jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      -
        name: Read latest release tag
        id: read-tag
        run: |
          echo ::set-output name=tag::$(curl -sL https://raw.githubusercontent.com/rebron1900/yarr/main/yarr-version.txt)
      -
        name: Checkout
        uses: actions/checkout@v2
        with:
          repository: 'rebron1900/yarr'
          ref: ${{ steps.read-tag.outputs.tag }}
      -
        name: Remove GOARCH
        run: |
          sed -i -e 's/GOARCH=amd64//g' makefile
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      -
        name: Login to registry
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      -
        name: Push to Docker
        uses: docker/build-push-action@v2
        with:
          context: .
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          push: true
          tags: |
            ${{ secrets.DOCKER_USERNAME }}/yarr:latest
            ${{ secrets.DOCKER_USERNAME }}/yarr:${{ steps.read-tag.outputs.tag }}
```
.github/workflows/build.yml (vendored, 53 lines changed)

```
@@ -7,7 +7,7 @@ on:
jobs:
  build_macos:
    name: Build for MacOS
    runs-on: macos-10.15
    runs-on: macos-13
    steps:
    - name: "Checkout"
      uses: actions/checkout@v2

@@ -16,7 +16,7 @@ jobs:
    - name: "Setup Go"
      uses: actions/setup-go@v2
      with:
        go-version: '^1.16'
        go-version: '^1.17'
    - name: Cache Go Modules
      uses: actions/cache@v2
      with:

@@ -34,7 +34,7 @@ jobs:

  build_windows:
    name: Build for Windows
    runs-on: windows-2019
    runs-on: windows-2022
    steps:
    - name: "Checkout"
      uses: actions/checkout@v2

@@ -43,7 +43,7 @@ jobs:
    - name: "Setup Go"
      uses: actions/setup-go@v2
      with:
        go-version: '^1.16'
        go-version: '^1.17'
    - name: Cache Go Modules
      uses: actions/cache@v2
      with:

@@ -61,7 +61,7 @@ jobs:

  build_linux:
    name: Build for Linux
    runs-on: ubuntu-18.04
    runs-on: ubuntu-22.04
    steps:
    - name: "Checkout"
      uses: actions/checkout@v2

@@ -70,7 +70,7 @@ jobs:
    - name: "Setup Go"
      uses: actions/setup-go@v2
      with:
        go-version: '^1.16'
        go-version: '^1.17'
    - name: Cache Go Modules
      uses: actions/cache@v2
      with:

@@ -100,8 +100,6 @@ jobs:
      with:
        tag_name: ${{ github.ref }}
        release_name: ${{ github.ref }}
        draft: true
        prerelease: true
    - name: Download Artifacts
      uses: actions/download-artifact@v2
      with:

@@ -131,7 +129,7 @@ jobs:
      with:
        upload_url: ${{ steps.create_release.outputs.upload_url }}
        asset_path: ./yarr-windows.zip
        asset_name: yarr-${{ github.ref }}-windows32.zip
        asset_name: yarr-${{ github.ref }}-windows64.zip
        asset_content_type: application/zip
    - name: Upload Linux
      uses: actions/upload-release-asset@v1

@@ -140,5 +138,40 @@ jobs:
      with:
        upload_url: ${{ steps.create_release.outputs.upload_url }}
        asset_path: ./yarr-linux.zip
        asset_name: yarr-${{ github.ref }}-linux32.zip
        asset_name: yarr-${{ github.ref }}-linux64.zip
        asset_content_type: application/zip

  build_docker:
    runs-on: ubuntu-latest
    needs: [create_release]
    steps:
    - name: Read latest release tag
      id: read-tag
      run: |
        echo ::set-output name=tag::$(curl -sL https://raw.githubusercontent.com/rebron1900/yarr/master/yarr-version.txt)
    - name: Checkout
      uses: actions/checkout@v2
      with:
        repository: 'rebron1900/yarr'
        ref: ${{ steps.read-tag.outputs.tag }}
    - name: Remove GOARCH
      run: |
        sed -i -e 's/GOARCH=amd64//g' makefile
    - name: Set up QEMU
      uses: docker/setup-qemu-action@v1
    - name: Set up Docker Buildx
      uses: docker/setup-buildx-action@v1
    - name: Login to registry
      uses: docker/login-action@v1
      with:
        username: ${{ secrets.DOCKER_USERNAME }}
        password: ${{ secrets.DOCKER_PASSWORD }}
    - name: Push to Docker
      uses: docker/build-push-action@v2
      with:
        context: .
        platforms: linux/amd64,linux/arm64,linux/arm/v7
        push: true
        tags: |
          ${{ secrets.DOCKER_USERNAME }}/yarr:latest
          ${{ secrets.DOCKER_USERNAME }}/yarr:${{ steps.read-tag.outputs.tag }}
```
.gitignore (vendored, 2 lines changed)

```
@@ -1,5 +1,3 @@
/server/assets.go
/gofeed
/_output
/yarr
*.db
```
cmd/yarr/main.go (new file, 155 lines)

```go
package main

import (
	"bufio"
	"flag"
	"fmt"
	"io"
	"log"
	"os"
	"path/filepath"
	"strings"

	"github.com/nkanaev/yarr/src/platform"
	"github.com/nkanaev/yarr/src/server"
	"github.com/nkanaev/yarr/src/storage"
)

var Version string = "0.0"
var GitHash string = "unknown"

var OptList = make([]string, 0)

func opt(envVar, defaultValue string) string {
	OptList = append(OptList, envVar)
	value := os.Getenv(envVar)
	if value != "" {
		return value
	}
	return defaultValue
}

func parseAuthfile(authfile io.Reader) (username, password string, err error) {
	scanner := bufio.NewScanner(authfile)
	for scanner.Scan() {
		line := scanner.Text()
		parts := strings.SplitN(line, ":", 2)
		if len(parts) != 2 {
			return "", "", fmt.Errorf("wrong syntax (expected `username:password`)")
		}
		username = parts[0]
		password = parts[1]
		break
	}
	return username, password, nil
}

func main() {
	platform.FixConsoleIfNeeded()

	var addr, db, authfile, auth, certfile, keyfile, basepath, logfile string
	var ver, open bool

	flag.CommandLine.SetOutput(os.Stdout)

	flag.Usage = func() {
		out := flag.CommandLine.Output()
		fmt.Fprintf(out, "Usage of %s:\n", os.Args[0])
		flag.PrintDefaults()
		fmt.Fprintln(out, "\nThe environmental variables, if present, will be used to provide\nthe default values for the params above:")
		fmt.Fprintln(out, " ", strings.Join(OptList, ", "))
	}

	flag.StringVar(&addr, "addr", opt("YARR_ADDR", "127.0.0.1:7070"), "address to run server on")
	flag.StringVar(&basepath, "base", opt("YARR_BASE", ""), "base path of the service url")
	flag.StringVar(&authfile, "auth-file", opt("YARR_AUTHFILE", ""), "`path` to a file containing username:password. Takes precedence over --auth (or YARR_AUTH)")
	flag.StringVar(&auth, "auth", opt("YARR_AUTH", ""), "string with username and password in the format `username:password`")
	flag.StringVar(&certfile, "cert-file", opt("YARR_CERTFILE", ""), "`path` to cert file for https")
	flag.StringVar(&keyfile, "key-file", opt("YARR_KEYFILE", ""), "`path` to key file for https")
	flag.StringVar(&db, "db", opt("YARR_DB", ""), "storage file `path`")
	flag.StringVar(&logfile, "log-file", opt("YARR_LOGFILE", ""), "`path` to log file to use instead of stdout")
	flag.BoolVar(&ver, "version", false, "print application version")
	flag.BoolVar(&open, "open", false, "open the server in browser")
	flag.Parse()

	if ver {
		fmt.Printf("v%s (%s)\n", Version, GitHash)
		return
	}

	log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile)
	if logfile != "" {
		file, err := os.OpenFile(logfile, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0644)
		if err != nil {
			log.Fatal("Failed to setup log file: ", err)
		}
		defer file.Close()
		log.SetOutput(file)
	} else {
		log.SetOutput(os.Stdout)
	}

	configPath, err := os.UserConfigDir()
	if err != nil {
		log.Fatal("Failed to get config dir: ", err)
	}

	if db == "" {
		storagePath := filepath.Join(configPath, "yarr")
		if err := os.MkdirAll(storagePath, 0755); err != nil {
			log.Fatal("Failed to create app config dir: ", err)
		}
		db = filepath.Join(storagePath, "storage.db")
	}

	log.Printf("using db file %s", db)

	var username, password string
	if authfile != "" {
		f, err := os.Open(authfile)
		if err != nil {
			log.Fatal("Failed to open auth file: ", err)
		}
		defer f.Close()
		username, password, err = parseAuthfile(f)
		if err != nil {
			log.Fatal("Failed to parse auth file: ", err)
		}
	} else if auth != "" {
		username, password, err = parseAuthfile(strings.NewReader(auth))
		if err != nil {
			log.Fatal("Failed to parse auth literal: ", err)
		}
	}

	if (certfile != "" || keyfile != "") && (certfile == "" || keyfile == "") {
		log.Fatalf("Both cert & key files are required")
	}

	store, err := storage.New(db)
	if err != nil {
		log.Fatal("Failed to initialise database: ", err)
	}

	srv := server.NewServer(store, addr)

	if basepath != "" {
		srv.BasePath = "/" + strings.Trim(basepath, "/")
	}

	if certfile != "" && keyfile != "" {
		srv.CertFile = certfile
		srv.KeyFile = keyfile
	}

	if username != "" && password != "" {
		srv.Username = username
		srv.Password = password
	}

	log.Printf("starting server at %s", srv.GetAddr())
	if open {
		platform.Open(srv.GetAddr())
	}
	platform.Start(srv)
}
```
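For reference, the `opt` helper above only changes the flag defaults: a `YARR_*` variable fills in the default, and a flag given explicitly on the command line still overrides it. A small usage sketch (assuming the built binary is on PATH):

```sh
# Environment variables seed the defaults read by opt()...
YARR_ADDR=0.0.0.0:7070 YARR_DB=/tmp/yarr.db yarr
# ...while a flag passed on the command line wins over the corresponding variable.
YARR_ADDR=0.0.0.0:7070 yarr -addr 127.0.0.1:9090
```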
cmd/yarr/main_test.go (new file, 47 lines)

```go
package main

import (
	"strings"
	"testing"
)

func TestPasswordFromAuthfile(t *testing.T) {
	for _, tc := range [...]struct {
		authfile         string
		expectedUsername string
		expectedPassword string
		expectedError    bool
	}{
		{
			authfile:         "username:password",
			expectedUsername: "username",
			expectedPassword: "password",
			expectedError:    false,
		},
		{
			authfile:      "username-and-no-password",
			expectedError: true,
		},
		{
			authfile:         "username:password:with:columns",
			expectedUsername: "username",
			expectedPassword: "password:with:columns",
			expectedError:    false,
		},
	} {
		t.Run(tc.authfile, func(t *testing.T) {
			username, password, err := parseAuthfile(strings.NewReader(tc.authfile))
			if tc.expectedUsername != username {
				t.Errorf("expected username %q, got %q", tc.expectedUsername, username)
			}
			if tc.expectedPassword != password {
				t.Errorf("expected password %q, got %q", tc.expectedPassword, password)
			}
			if tc.expectedError && err == nil {
				t.Errorf("expected error, got nil")
			} else if !tc.expectedError && err != nil {
				t.Errorf("unexpected error: %v", err)
			}
		})
	}
}
```
doc/build.md (new file, 44 lines)

```markdown
## Compilation

Install `Go >= 1.17` and `GCC`. Get the source code:

    git clone https://github.com/nkanaev/yarr.git

Then run one of the corresponding commands:

    # create an executable for the host os
    make build_macos     # -> _output/macos/yarr.app
    make build_linux     # -> _output/linux/yarr
    make build_windows   # -> _output/windows/yarr.exe

    # host-specific cli version (no gui)
    make build_default   # -> _output/yarr

    # ... or start a dev server locally
    make serve           # starts a server at http://localhost:7070

    # ... or build a docker image
    docker build -t yarr -f etc/dockerfile .

## ARM compilation

The instructions below are to cross-compile *yarr* to `Linux/ARM*`.

Build:

    docker build -t yarr.arm -f etc/dockerfile.arm .

Test:

    # inside host
    docker run -it --rm yarr.arm

    # then, inside container
    cd /root/out
    qemu-aarch64 -L /usr/aarch64-linux-gnu/ yarr.arm64

Extract files from images:

    CID=$(docker create yarr.arm)
    docker cp -a "$CID:/root/out" .
    docker rm "$CID"
```
@@ -1,3 +1,58 @@

```markdown
# upcoming

- (new) Fever API support (thanks to @icefed)
- (fix) duplicate articles caused by the same feed addition (thanks to @adaszko)
- (fix) relative article links (thanks to @adazsko for the report)
- (fix) atom article links stored in id element (thanks to @adazsko for the report)
- (fix) parsing atom feed titles (thanks to @wnh)
- (fix) sorting same-day batch articles (thanks to @lamescholar for the report)

# v2.4 (2023-08-15)

- (new) ARM build support (thanks to @tillcash & @fenuks)
- (new) auth configuration via param or env variable (thanks to @pierreprinetti)
- (new) web app manifest for an app-like experience on mobile (thanks to @qbit)
- (fix) concurrency issue crashing the app (thanks to @quoing)
- (fix) favicon visibility in dark mode (thanks to @caycaycarly for the report)
- (fix) autoloading more articles not working in certain edge cases (thanks to @fenuks for the report)
- (fix) handle Google URL redirects in "Read Here" (thanks to @cubbei for discovery)
- (fix) handle failures to extract content in "Read Here" (thanks to @grigio for the report)
- (fix) article view width for high resolution screens (thanks to @whaler-ragweed for the report)
- (fix) make newly added feed searchable (thanks to @BMorearty for the report)
- (fix) feed/article selection accessibility via arrow keys (thanks to @grigio and @tillcash)
- (fix) keyboard shortcuts in Firefox (thanks to @kaloyan13)
- (fix) keyboard shortcuts in non-English layouts (thanks to @kaloyan13)
- (fix) sorting articles with timezone information (thanks to @x2cf)
- (fix) handling links set in guid only for certain feeds (thanks to @adaszko for the report)
- (fix) crashes caused by feed icon endpoint (thanks to @adaszko)

# v2.3 (2022-05-03)

- (fix) handling encodings (thanks to @f100024 & @fserb)
- (fix) parsing xml feeds with illegal characters (thanks to @stepelu for the report)
- (fix) old articles reappearing as unread (thanks to @adaszko for the report)
- (fix) item list scrolling issue on large screens (thanks to @bielej for the report)
- (fix) keyboard shortcuts color in dark mode (thanks to @John09f9 for the report)
- (etc) autofocus when adding a new feed (thanks to @lakuapik)

# v2.2 (2021-11-20)

- (fix) windows console support (thanks to @dufferzafar for the report)
- (fix) remove html tags from article titles (thanks to Alex Went for the report)
- (etc) autoselect current folder when adding a new feed (thanks to @krkk)
- (etc) folder/feed settings menu available across all filters

# v2.1 (2021-08-16)

- (new) configuration via env variables
- (fix) missing `content-type` headers (thanks to @verahawk for the report)
- (fix) handle opml files not following the spec (thanks to @huangnauh for the report)
- (fix) pagination in unread/starred feeds (thanks to @Farow for the report)
- (fix) handling feeds with non-utf8 encodings (thanks to @fserb for the report)
- (fix) errors caused by empty feeds (thanks to @decke)
- (fix) recognize all audio mime types as podcasts (thanks to @krkk)
- (fix) ui tweaks (thanks to @Farow)

# v2.0 (2021-04-18)

- (new) user interface tweaks
```
doc/fever.md (new file, 19 lines)

```markdown
# Fever API support

The Fever API is an HTTP API for RSS clients. Because the Fever API definition is not very precise, Fever server and client implementations may have compatibility problems.

The Fever API implemented by Yarr is based on the Fever API spec: https://github.com/DigitalDJ/tinytinyrss-fever-plugin/blob/master/fever-api.md.

Here are some apps that have been tested to work with yarr. Feel free to test other clients/apps and update the list here.

> Different apps expect different URL/address formats. Note whether the URL you enter needs the `http://` scheme and a trailing `/`.

| App | Platforms | Config Server URL |
|:--- |:--------- |:----------------- |
| [Reeder](https://reederapp.com/) | MacOS<br>iOS | 127.0.0.1:7070/fever<br>http://127.0.0.1:7070/fever |
| [ReadKit](https://readkit.app/) | MacOS<br>iOS | http://127.0.0.1:7070/fever |
| [Fluent Reader](https://github.com/yang991178/fluent-reader) | MacOS<br>Windows | http://127.0.0.1:7070/fever/ |
| [Unread](https://apps.apple.com/us/app/unread-an-rss-reader/id1363637349) | iOS | http://127.0.0.1:7070/fever |
| [Fiery Feeds](https://voidstern.net/fiery-feeds) | MacOS<br>iOS | http://127.0.0.1:7070/fever |

If you are having trouble using Fever, please open an issue and mention @icefed, thanks.
```
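For a quick connectivity check before pointing a client at the endpoint, the Fever handshake can be exercised with curl. This is a sketch assuming the usual Fever convention of `api_key = md5("username:password")` and example credentials; it is not taken from the yarr docs:

```sh
# Probe the Fever endpoint; a JSON reply containing "auth":1 means the key was accepted.
API_KEY=$(printf 'admin:secret' | md5sum | cut -d' ' -f1)
curl -s -X POST "http://127.0.0.1:7070/fever/?api" -d "api_key=${API_KEY}"
```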
@@ -159,6 +159,7 @@ Delete any from the list in case they drop support of web feeds.

```
- medium
- posthaven
- reddit
- substack
- tumblr
- vimeo
- wordpress
```
@@ -20,3 +20,8 @@ The licenses are included, and the authorship comments are left intact.

```
  https://github.com/getlantern/systray (commit:2c0986d) Apache 2.0

  removed golog dependency

- fixconsole
  https://github.com/apenwarr/fixconsole (commit:5a9f648) Apache 2.0

  removed `w32` dependency
```
doc/todo.txt (new file, empty)
etc/dockerfile.arm (new file, 44 lines)

```dockerfile
FROM ubuntu:20.04

# Install GCC
RUN apt update
RUN apt install -y \
    wget build-essential \
    gcc-aarch64-linux-gnu \
    binutils-aarch64-linux-gnu binutils-aarch64-linux-gnu-dbg \
    gcc-arm-linux-gnueabihf \
    binutils-arm-linux-gnueabihf binutils-arm-linux-gnueabihf-dbg
RUN env DEBIAN_FRONTEND=noninteractive \
    apt install -y qemu-user qemu-user-static

# Install Golang
RUN wget --quiet https://go.dev/dl/go1.18.2.linux-amd64.tar.gz && \
    rm -rf /usr/local/go && \
    tar -C /usr/local -xzf go1.18.2.linux-amd64.tar.gz
ENV PATH=$PATH:/usr/local/go/bin

# Copy source code
WORKDIR /root/src
RUN mkdir /root/out
COPY . .

# Build ARM64
RUN env \
    CC=aarch64-linux-gnu-gcc \
    CGO_ENABLED=1 \
    GOOS=linux GOARCH=arm64 \
    go build \
    -tags "sqlite_foreign_keys linux" \
    -ldflags="-s -w" \
    -o /root/out/yarr.arm64 ./cmd/yarr

RUN env \
    CC=arm-linux-gnueabihf-gcc \
    CGO_ENABLED=1 \
    GOOS=linux GOARCH=arm GOARM=7 \
    go build \
    -tags "sqlite_foreign_keys linux" \
    -ldflags="-s -w" \
    -o /root/out/yarr.arm7 ./cmd/yarr

CMD ["/bin/bash"]
```
etc/install-linux.sh (new file, 23 lines)

```sh
#!/bin/bash

cat >"$HOME/.local/share/applications/yarr.desktop" <<END
[Desktop Entry]
Name=yarr
Exec=$HOME/.local/bin/yarr -open
Icon=yarr
Type=Application
Categories=Internet;
END

cat >"$HOME/.local/share/icons/yarr.svg" <<END
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-anchor-favicon">
<circle cx="12" cy="5" r="3" stroke-width="4" stroke="#ffffff"></circle>
<line x1="12" y1="22" x2="12" y2="8" stroke-width="4" stroke="#ffffff"></line>
<path d="M5 12H2a10 10 0 0 0 20 0h-3" stroke-width="4" stroke="#ffffff"></path>

<circle cx="12" cy="5" r="3"></circle>
<line x1="12" y1="22" x2="12" y2="8"></line>
<path d="M5 12H2a10 10 0 0 0 20 0h-3"></path>
</svg>
END
```
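Per the Linux install notes in the readme diff below, this script is run after placing the binary under `$HOME/.local/bin`. A sketch of the full sequence (the archive name is an example):

```sh
# Unpack the release, install the binary, then register the desktop entry and icon written by the script above.
unzip yarr-*-linux64.zip
mkdir -p "$HOME/.local/bin" "$HOME/.local/share/applications" "$HOME/.local/share/icons"
mv yarr "$HOME/.local/bin/yarr"
bash etc/install-linux.sh
```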
etc/promo.png (binary changed: 727 KiB before, 173 KiB after)
go.mod (10 lines changed)

```
@@ -1,9 +1,11 @@
module github.com/nkanaev/yarr

go 1.16
go 1.17

require (
    github.com/mattn/go-sqlite3 v1.14.0
    golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e
    golang.org/x/sys v0.0.0-20201018230417-eeed37f84f13
    github.com/mattn/go-sqlite3 v1.14.7
    golang.org/x/net v0.17.0
    golang.org/x/sys v0.13.0
)

require golang.org/x/text v0.13.0 // indirect
```
go.sum (54 lines changed)

```
@@ -1,15 +1,45 @@
github.com/PuerkitoBio/goquery v1.5.1/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc=
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/mattn/go-sqlite3 v1.14.0 h1:mLyGNKR8+Vv9CAU7PphKa2hkEqxxhn8i32J6FPj1/QA=
github.com/mattn/go-sqlite3 v1.14.0/go.mod h1:JIl7NbARA7phWnGvh0LKTyg7S9BA+6gx71ShQilpsus=
github.com/mattn/go-sqlite3 v1.14.7 h1:fxWBnXkxfM6sRiuH3bqJ4CfzZojMOLVc0UTsTglEghA=
github.com/mattn/go-sqlite3 v1.14.7/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e h1:3G+cUijn7XD+S4eJFddp53Pv7+slrESplyjG25HgL+k=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ=
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201018230417-eeed37f84f13 h1:5jaG59Zhd+8ZXe8C+lgiAGqkOaZBruqrWclLkgAww34=
golang.org/x/sys v0.0.0-20201018230417-eeed37f84f13/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
```
makefile (24 lines changed)

```
@@ -1,4 +1,4 @@
VERSION=2.0
VERSION=2.5
GITHASH=$(shell git rev-parse --short=8 HEAD)

CGO_ENABLED=1

@@ -8,32 +8,26 @@ GO_LDFLAGS := $(GO_LDFLAGS) -X 'main.Version=$(VERSION)' -X 'main.GitHash=$(GITH

build_default:
    mkdir -p _output
    go build -tags "sqlite_foreign_keys release" -ldflags="$(GO_LDFLAGS)" -o _output/yarr src/main.go
    go build -tags "sqlite_foreign_keys" -ldflags="$(GO_LDFLAGS)" -o _output/yarr ./cmd/yarr

build_macos:
    set GOOS=darwin
    set GOARCH=amd64
    mkdir -p _output/macos
    go build -tags "sqlite_foreign_keys release macos" -ldflags="$(GO_LDFLAGS)" -o _output/macos/yarr src/main.go
    GOOS=darwin GOARCH=amd64 go build -tags "sqlite_foreign_keys macos" -ldflags="$(GO_LDFLAGS)" -o _output/macos/yarr ./cmd/yarr
    cp src/platform/icon.png _output/macos/icon.png
    go run bin/package_macos.go -outdir _output/macos -version "$(VERSION)"
    go run ./cmd/package_macos -outdir _output/macos -version "$(VERSION)"

build_linux:
    set GOOS=linux
    set GOARCH=386
    mkdir -p _output/linux
    go build -tags "sqlite_foreign_keys release linux" -ldflags="$(GO_LDFLAGS)" -o _output/linux/yarr src/main.go
    GOOS=linux GOARCH=amd64 go build -tags "sqlite_foreign_keys linux" -ldflags="$(GO_LDFLAGS)" -o _output/linux/yarr ./cmd/yarr

build_windows:
    set GOOS=windows
    set GOARCH=386
    mkdir -p _output/windows
    go run bin/generate_versioninfo.go -version "$(VERSION)" -outfile src/platform/versioninfo.rc
    go run ./cmd/generate_versioninfo -version "$(VERSION)" -outfile src/platform/versioninfo.rc
    windres -i src/platform/versioninfo.rc -O coff -o src/platform/versioninfo.syso
    go build -tags "sqlite_foreign_keys release windows" -ldflags="$(GO_LDFLAGS) -H windowsgui" -o _output/windows/yarr.exe src/main.go
    GOOS=windows GOARCH=amd64 go build -tags "sqlite_foreign_keys windows" -ldflags="$(GO_LDFLAGS) -H windowsgui" -o _output/windows/yarr.exe ./cmd/yarr

serve:
    go run -tags "sqlite_foreign_keys" src/main.go -db local.db
    go run -tags "sqlite_foreign_keys" ./cmd/yarr -db local.db

test:
    cd src && go test -tags "sqlite_foreign_keys release" ./...
    go test -tags "sqlite_foreign_keys" ./...
```
readme.md (126 lines changed)

~~~
@@ -3,72 +3,100 @@
**yarr** (yet another rss reader) is a web-based feed aggregator which can be used both
as a desktop application and a personal self-hosted server.

It is written in Go with the frontend in Vue.js. The storage is backed by SQLite.
The app is a single binary with an embedded database (SQLite).



## usage

The latest prebuilt binaries for Linux/MacOS/Windows are available
[here](https://github.com/nkanaev/yarr/releases/latest).
The latest prebuilt binaries for Linux/MacOS/Windows AMD64 are available
[here](https://github.com/nkanaev/yarr/releases/latest). Installation instructions:

### macos
* Command Arges

```
-addr string
    address to run server on (default "127.0.0.1:7070")
-auth-file path
    path to a file containing username:password
-base string
    base path of the service url
-cert-file path
    path to cert file for https
-db path
    storage file path
-key-file path
    path to key file for https
-log-file path
    path to log file to use instead of stdout
-open
    open the server in browser
-version
    print application version
```

Download `yarr-*-macos64.zip`, unzip it, place `yarr.app` in `/Applications` folder.
* MacOS

The binaries are not signed, because the author doesn't want to buy a certificate.
Apple hates cheapskate developers, therefore the OS will refuse to run the application.
To bypass these measures, you can run the command:
Download `yarr-*-macos64.zip`, unzip it, place `yarr.app` in `/Applications` folder, [open the app][macos-open], click the anchor menu bar icon, select "Open".

    xattr -d com.apple.quarantine /Applications/yarr.app
* Windows

### windows
Download `yarr-*-windows64.zip`, unzip it, open `yarr.exe`, click the anchor system tray icon, select "Open".

Download `yarr-*-windows32.zip`, unzip it, place wherever you'd like to
(`C:\Program Files` or Recycle Bin). Create a shortcut manually if you'd like to.
* Linux

Microsoft doesn't like cheapskate developers too,
but might only gently warn you about that, which you can safely ignore.
Download `yarr-*-linux64.zip`, unzip it, place `yarr` in `$HOME/.local/bin`
and run [the script](etc/install-linux.sh).

### linux
[macos-open]: https://support.apple.com/en-gb/guide/mac-help/mh40616/mac

The Linux version doesn't come with the desktop environment integration.
For easy access on DE it is recommended to create a desktop menu entry by
by following the steps below:
* Docker environment

You can use docker or docker-compose to run yarr, and you can also use environment variables to configure startup parameters.

- `YARR_ADDR` :address to run server on (default "127.0.0.1:7070")
- `YARR_BASE` :base path of the service url
- `YARR_AUTHFILE` :path to a file containing username:password
- `YARR_CERTFILE` :path to cert file for https
- `YARR_KEYFILE` :path to key file for https
- `YARR_DB` :storage file path
- `YARR_LOGFILE` :path to log file to use instead of stdout

* Docker run:
```
docker run -d \
    --name yarr \
    -p 25255:7070 \
    -e YARR_AUTHFILE="/data/.auth.list" \
    -v /data/yarr-data:/data \
    --restart always \
    arsfeld/yarr:latest
```

* Docker-Compose Run

    unzip -x yarr*.zip
    sudo mv yarr /usr/local/bin/yarr
    sudo nano /usr/local/share/applications/yarr.desktop
Create a file named `.auth.list` under the `/data/` directory, and the content format should be: `username:password`.
Then start by running docker-compose up -d and enjoy!

and pasting the content:
```yaml
version: '3.3'
services:
    yarr:
        container_name: yarr
        image: 'arsfeld/yarr:latest'
        restart: always
        ports:
            - '25255:7070'
        environment:
            YARR_AUTHFILE: "/data/.auth.list"
        volumes:
            - '/data/yarr-data:/data'
```

* See more:

    [Desktop Entry]
    Name=yarr
    Exec=/usr/local/bin/yarr -open
    Icon=rss
    Type=Application
    Categories=Internet;

For self-hosting, see `yarr -h` for auth, tls & server configuration flags.

## build

Install `Go >= 1.16` and `gcc`. Get the source code:

    git clone https://github.com/nkanaev/yarr.git

Then run one of the corresponding commands:

    # create an executable for the host os
    make build_macos     # -> _output/macos/yarr.app
    make build_linux     # -> _output/linux/yarr
    make build_windows   # -> _output/windows/yarr.exe

    # ... or start a dev server locally
    make serve           # starts a server at http://localhost:7070

    # ... or build a docker image
    docker build -t yarr .
* [Building from source code](doc/build.md)
* [Fever API support](doc/fever.md)

## credits
~~~
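To go with the Docker notes in the readme diff above: the compose file mounts `/data/yarr-data` on the host at `/data` inside the container, so the file referenced by `YARR_AUTHFILE` lives on the host side. A minimal sketch, assuming that volume mapping and example credentials:

```sh
# Create the credentials file that YARR_AUTHFILE points at
# (host path mirrors the volume mapping /data/yarr-data:/data; values are examples).
sudo mkdir -p /data/yarr-data
printf 'alice:s3cret\n' | sudo tee /data/yarr-data/.auth.list > /dev/null
sudo chmod 600 /data/yarr-data/.auth.list
docker compose up -d   # or: docker-compose up -d
```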
@@ -6,6 +6,7 @@ import (

```
	"io"
	"io/fs"
	"io/ioutil"
	"log"
	"os"
)
```

@@ -29,9 +30,18 @@ func Template(path string) *template.Template {

```
	if !found {
		tmpl = template.Must(template.New(path).Delims("{%", "%}").Funcs(template.FuncMap{
			"inline": func(svg string) template.HTML {
				svgfile, _ := FS.Open("graphicarts/" + svg)
				content, _ := ioutil.ReadAll(svgfile)
				svgfile.Close()
				svgfile, err := FS.Open("graphicarts/" + svg)
				// should never happen
				if err != nil {
					log.Fatal(err)
				}
				defer svgfile.Close()

				content, err := ioutil.ReadAll(svgfile)
				// should never happen
				if err != nil {
					log.Fatal(err)
				}
				return template.HTML(content)
			},
		}).ParseFS(FS, path))
```
@@ -1,5 +1,3 @@

```
// +build release

package assets

import "embed"
```

@@ -1 +0,0 @@ (removed image, 269 B)

```xml
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-chevron-down"><polyline points="6 9 12 15 18 9"></polyline></svg>
```

src/assets/graphicarts/favicon.png (new binary file, 1.6 KiB)

src/assets/graphicarts/favicon.svg (new file, 9 lines, 603 B)

```xml
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-anchor-favicon">
<circle cx="12" cy="5" r="3" stroke-width="4" stroke="#ffffff"></circle>
<line x1="12" y1="22" x2="12" y2="8" stroke-width="4" stroke="#ffffff"></line>
<path d="M5 12H2a10 10 0 0 0 20 0h-3" stroke-width="4" stroke="#ffffff"></path>

<circle cx="12" cy="5" r="3"></circle>
<line x1="12" y1="22" x2="12" y2="8"></line>
<path d="M5 12H2a10 10 0 0 0 20 0h-3"></path>
</svg>
```

(removed binary file, 103 KiB)

(removed binary file, 2.2 KiB)

@@ -1 +0,0 @@ (removed image, 482 B)

```xml
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-list"><line x1="8" y1="6" x2="21" y2="6"></line><line x1="8" y1="12" x2="21" y2="12"></line><line x1="8" y1="18" x2="21" y2="18"></line><line x1="3" y1="6" x2="3.01" y2="6"></line><line x1="3" y1="12" x2="3.01" y2="12"></line><line x1="3" y1="18" x2="3.01" y2="18"></line></svg>
```

@@ -1 +0,0 @@ (removed image, 346 B)

```xml
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-menu"><line x1="3" y1="12" x2="21" y2="12"></line><line x1="3" y1="6" x2="21" y2="6"></line><line x1="3" y1="18" x2="21" y2="18"></line></svg>
```

@@ -1 +0,0 @@ (removed image, 341 B)

```xml
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-more-vertical"><circle cx="12" cy="12" r="1"></circle><circle cx="12" cy="5" r="1"></circle><circle cx="12" cy="19" r="1"></circle></svg>
```

@@ -1 +0,0 @@ (removed image, 1011 B)

```xml
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-settings"><circle cx="12" cy="12" r="3"></circle><path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path></svg>
```

@@ -1 +0,0 @@ (removed image, 448 B)

```xml
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-trash-2"><polyline points="3 6 5 6 21 6"></polyline><path d="M19 6v14a2 2 0 0 1-2 2H7a2 2 0 0 1-2-2V6m3 0V4a2 2 0 0 1 2-2h4a2 2 0 0 1 2 2v2"></path><line x1="10" y1="11" x2="10" y2="17"></line><line x1="14" y1="11" x2="14" y2="17"></line></svg>
```
@@ -5,7 +5,9 @@

```
    <title>yarr!</title>
    <link rel="stylesheet" href="./static/stylesheets/bootstrap.min.css">
    <link rel="stylesheet" href="./static/stylesheets/app.css">
    <link rel="icon shortcut" href="./static/graphicarts/icon.png">
    <link rel="icon" href="./static/graphicarts/favicon.svg" type="image/svg+xml">
    <link rel="alternate icon" href="./static/graphicarts/favicon.png" type="image/png">
    <link rel="manifest" href="./manifest.json" />
    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
    <script>
      window.app = window.app || {}
```

@@ -57,6 +59,18 @@

```
      <div class="dropdown-divider"></div>

      <header class="dropdown-header">Theme</header>
      <div class="row text-center m-0">
        <button class="btn btn-link col-4 px-0 rounded-0"
                :class="'theme-'+t"
                @click.stop="theme.name = t"
                v-for="t in ['light', 'sepia', 'night']">
          <span class="icon" v-if="theme.name == t">{% inline "check.svg" %}</span>
        </button>
      </div>

      <div class="dropdown-divider"></div>

      <header class="dropdown-header">Auto Refresh</header>
      <div class="row text-center m-0">
        <button class="dropdown-item col-4 px-0" :class="{active: !refreshRate}" @click.stop="refreshRate = 0">0</button>
```

@@ -117,9 +131,10 @@

```
      <div v-for="folder in foldersWithFeeds">
        <label class="selectgroup mt-1"
               :class="{'d-none': filterSelected
                 && !(current.folder.id == folder.id || current.feed.folder_id == folder.id)
                 && !filteredFolderStats[folder.id]
                 && (!itemSelectedDetails || feedsById[itemSelectedDetails.feed_id].folder_id != folder.id)}">
          <input type="radio" name="feed" :value="'folder:'+folder.id" v-model="feedSelected">
                 && (!itemSelectedDetails || (feedsById[itemSelectedDetails.feed_id] || {}).folder_id != folder.id)}">
          <input type="radio" name="feed" :value="'folder:'+folder.id" v-model="feedSelected" v-if="folder.id">
          <div class="selectgroup-label d-flex align-items-center w-100" v-if="folder.id">
            <span class="icon mr-2"
                  :class="{expanded: folder.is_expanded}"
```

@@ -133,6 +148,7 @@

```
        <div v-show="!folder.id || folder.is_expanded" class="mt-1" :class="{'pl-3': folder.id}">
          <label class="selectgroup"
                 :class="{'d-none': filterSelected
                   && !(current.feed.id == feed.id)
                   && !filteredFeedStats[feed.id]
                   && (!itemSelectedDetails || itemSelectedDetails.feed_id != feed.id)}"
                 v-for="feed in folder.feeds">
```

@@ -177,11 +193,16 @@

```
              title="Mark All Read">
        <span class="icon">{% inline "check.svg" %}</span>
      </button>


      <button class="btn btn-link toolbar-item px-2 ml-2" v-if="!current.type" disabled>
        <span class="icon">{% inline "more-horizontal.svg" %}</span>
      </button>
      <dropdown class="settings-dropdown"
                toggle-class="btn btn-link toolbar-item px-2 ml-2"
                drop="right"
                title="Feed Settings"
                v-if="!filterSelected && current.type == 'feed'">
                v-if="current.type == 'feed'">
        <template v-slot:button>
          <span class="icon">{% inline "more-horizontal.svg" %}</span>
        </template>
```

@@ -226,7 +247,7 @@

```
                toggle-class="btn btn-link toolbar-item px-2 ml-2"
                title="Folder Settings"
                drop="right"
                v-if="!filterSelected && current.type == 'folder'">
                v-if="current.type == 'folder'">
        <template v-slot:button>
          <span class="icon">{% inline "more-horizontal.svg" %}</span>
        </template>
```

@@ -253,14 +274,14 @@

```
            <span class="icon icon-small mr-1" v-if="item.status=='starred'">{% inline "star-full.svg" %}</span>
          </transition>
          <small class="flex-fill text-truncate mr-1">
            {{ feedsById[item.feed_id].title }}
            {{ (feedsById[item.feed_id] || {}).title }}
          </small>
          <small class="flex-shrink-0"><relative-time :val="item.date"/></small>
          <small class="flex-shrink-0"><relative-time v-bind:title="formatDate(item.date)" :val="item.date"/></small>
        </div>
        <div>{{ item.title || 'untitled' }}</div>
      </div>
    </label>
    <button class="btn btn-link btn-block loading my-3" v-if="itemsPage.cur < itemsPage.num"></button>
    <button class="btn btn-link btn-block loading my-3" v-if="itemsHasMore"></button>
  </div>
  <div class="px-3 py-2 border-top text-danger text-break" v-if="feed_errors[current.feed.id]">
    {{ feed_errors[current.feed.id] }}
```

@@ -285,14 +306,6 @@

```
      <template v-slot:button>
        <span class="icon">{% inline "sliders.svg" %}</span>
      </template>
      <div class="row text-center m-0">
        <button class="btn btn-link col-4 px-0 rounded-0"
                :class="'theme-'+t"
                @click.stop="theme.name = t"
                v-for="t in ['light', 'sepia', 'night']">
          <span class="icon" v-if="theme.name == t">{% inline "check.svg" %}</span>
        </button>
      </div>

      <button class="dropdown-item" :class="{active: !theme.font}" @click.stop="theme.font = ''">sans-serif</button>
      <button class="dropdown-item font-serif" :class="{active: theme.font == 'serif'}" @click.stop="theme.font = 'serif'">serif</button>
```

@@ -322,17 +335,19 @@

```
           class="content px-4 pt-3 pb-5 border-top overflow-auto"
           :class="{'font-serif': theme.font == 'serif', 'font-monospace': theme.font == 'monospace'}"
           :style="{'font-size': theme.size + 'rem'}">
        <h1><b>{{ itemSelectedDetails.title || 'untitled' }}</b></h1>
        <div class="text-muted">
          <div>{{ feedsById[itemSelectedDetails.feed_id].title }}</div>
          <time>{{ formatDate(itemSelectedDetails.date) }}</time>
        <div class="content-wrapper">
          <h1><b>{{ itemSelectedDetails.title || 'untitled' }}</b></h1>
          <div class="text-muted">
            <div>{{ (feedsById[itemSelectedDetails.feed_id] || {}).title }}</div>
            <time>{{ formatDate(itemSelectedDetails.date) }}</time>
          </div>
          <hr>
          <div v-if="!itemSelectedReadability">
            <img :src="itemSelectedDetails.image" v-if="itemSelectedDetails.image" class="mb-3">
            <audio class="w-100" controls v-if="itemSelectedDetails.podcast_url" :src="itemSelectedDetails.podcast_url"></audio>
          </div>
          <div v-html="itemSelectedContent"></div>
        </div>
        <hr>
        <div v-if="!itemSelectedReadability">
          <img :src="itemSelectedDetails.image" v-if="itemSelectedDetails.image" class="mb-3">
          <audio class="w-100" controls v-if="itemSelectedDetails.podcast_url" :src="itemSelectedDetails.podcast_url"></audio>
        </div>
        <div v-html="itemSelectedContent"></div>
      </div>
    </div>
    <modal :open="!!settings" @hide="settings = ''">
```

@@ -343,14 +358,14 @@

```
        <p class="cursor-default"><b>New Feed</b></p>
        <form action="" @submit.prevent="createFeed(event)" class="mt-4">
          <label for="feed-url">URL</label>
          <input id="feed-url" name="url" type="url" class="form-control" required autocomplete="off" :readonly="feedNewChoice.length > 0">
          <input id="feed-url" name="url" type="url" class="form-control" required autocomplete="off" :readonly="feedNewChoice.length > 0" placeholder="https://example.com/feed" v-focus V-model="autoFeedUrl">
          <label for="feed-folder" class="mt-3 d-block">
            Folder
            <a href="#" class="float-right text-decoration-none" @click.prevent="createNewFeedFolder()">new folder</a>
          </label>
          <select class="form-control" id="feed-folder" name="folder_id" ref="newFeedFolder">
            <option value="">---</option>
            <option :value="folder.id" v-for="folder in folders">{{ folder.title }}</option>
            <option :value="folder.id" v-for="folder in folders" :selected="folder.id === current.feed.folder_id || folder.id === current.folder.id">{{ folder.title }}</option>
          </select>
          <div class="mt-4" v-if="feedNewChoice.length">
            <p class="mb-2">
```
@@ -105,7 +105,7 @@

```
    return api('post', './logout')
  },
  crawl: function(url) {
    return api('get', './page?url=' + url).then(json)
    return api('get', './page?url=' + encodeURIComponent(url)).then(json)
  }
}
})()
```
@@ -21,6 +21,12 @@ Vue.directive('scroll', {

```
  },
})

Vue.directive('focus', {
  inserted: function(el) {
    el.focus()
  }
})

Vue.component('drag', {
  props: ['width'],
  template: '<div class="drag"></div>',
```

@@ -47,13 +53,13 @@ Vue.component('drag', {

```
})

Vue.component('dropdown', {
  props: ['class', 'toggle-class', 'ref', 'drop'],
  props: ['class', 'toggle-class', 'ref', 'drop', 'title'],
  data: function() {
    return {open: false}
  },
  template: `
    <div class="dropdown" :class="$attrs.class">
      <button ref="btn" @click="toggle" :class="btnToggleClass"><slot name="button"></slot></button>
      <button ref="btn" @click="toggle" :class="btnToggleClass" :title="$props.title"><slot name="button"></slot></button>
      <div ref="menu" class="dropdown-menu" :class="{show: open}"><slot v-if="open"></slot></div>
    </div>
  `,
```

@@ -177,10 +183,18 @@ Vue.component('relative-time', {

```
})

var vm = new Vue({
  mounted:function(){
    const subscribe_to = new URLSearchParams(window.location.search).get('subscribe_to');
    if(subscribe_to){
      vm.settings = 'create'
      //document.getElementById("feed-url").value = subscribe_to;
      this.autoFeedUrl = subscribe_to;
    }
  },
  created: function() {
    this.refreshStats()
      .then(this.refreshFeeds.bind(this))
      .then(this.refreshItems.bind(this))
      .then(this.refreshItems.bind(this, false))

    api.feeds.list_errors().then(function(errors) {
      vm.feed_errors = errors
```

@@ -197,10 +211,7 @@ var vm = new Vue({

```
    'feedNewChoice': [],
    'feedNewChoiceSelected': '',
    'items': [],
    'itemsPage': {
      'cur': 1,
      'num': 1,
    },
    'itemsHasMore': true,
    'itemSelected': null,
    'itemSelectedDetails': null,
    'itemSelectedReadability': '',
```

@@ -304,13 +315,13 @@

```
    },
    'filterSelected': function(newVal, oldVal) {
      if (oldVal === undefined) return // do nothing, initial setup
      api.settings.update({filter: newVal}).then(this.refreshItems.bind(this))
      api.settings.update({filter: newVal}).then(this.refreshItems.bind(this, false))
      this.itemSelected = null
      this.computeStats()
    },
    'feedSelected': function(newVal, oldVal) {
      if (oldVal === undefined) return // do nothing, initial setup
      api.settings.update({feed: newVal}).then(this.refreshItems.bind(this))
      api.settings.update({feed: newVal}).then(this.refreshItems.bind(this, false))
      this.itemSelected = null
      if (this.$refs.itemlist) this.$refs.itemlist.scrollTop = 0
    },
```

@@ -339,7 +350,7 @@

```
    }, 500),
    'itemSortNewestFirst': function(newVal, oldVal) {
      if (oldVal === undefined) return // do nothing, initial setup
      api.settings.update({sort_newest_first: newVal}).then(this.refreshItems.bind(this))
      api.settings.update({sort_newest_first: newVal}).then(vm.refreshItems.bind(this, false))
    },
    'feedListWidth': debounce(function(newVal, oldVal) {
      if (oldVal === undefined) return // do nothing, initial setup
```

@@ -404,34 +415,49 @@

```
        vm.feeds = values[1]
      })
    },
    refreshItems: function() {
    refreshItems: function(loadMore) {
      if (this.feedSelected === null) {
        vm.items = []
        vm.itemsPage = {'cur': 1, 'num': 1}
        vm.itemsHasMore = false
        return
      }

      var query = this.getItemsQuery()
      if (loadMore) {
        query.after = vm.items[vm.items.length-1].id
      }

      this.loading.items = true
      return api.items.list(query).then(function(data) {
        vm.items = data.list
        vm.itemsPage = data.page
      api.items.list(query).then(function(data) {
        if (loadMore) {
          vm.items = vm.items.concat(data.list)
        } else {
          vm.items = data.list
        }
        vm.itemsHasMore = data.has_more
        vm.loading.items = false

        // load more if there's some space left at the bottom of the item list.
        vm.$nextTick(function() {
          if (vm.itemsHasMore && !vm.loading.items && vm.itemListCloseToBottom()) {
            vm.refreshItems(true)
          }
        })
      })
    },
    itemListCloseToBottom: function() {
      // approx. vertical space at the bottom of the list (loading el & paddings) when 1rem = 16px
      var bottomSpace = 70
      var scale = (parseFloat(getComputedStyle(document.documentElement).fontSize) || 16) / 16

      var el = this.$refs.itemlist
      var closeToBottom = (el.scrollHeight - el.scrollTop - el.offsetHeight) < bottomSpace * scale
      return closeToBottom
    },
    loadMoreItems: function(event, el) {
      if (this.itemsPage.cur >= this.itemsPage.num) return
      if (!this.itemsHasMore) return
      if (this.loading.items) return
      var closeToBottom = (el.scrollHeight - el.scrollTop - el.offsetHeight) < 50
      if (closeToBottom) {
        this.loading.moreitems = true
        var query = this.getItemsQuery()
        query.page = this.itemsPage.cur + 1
        api.items.list(query).then(function(data) {
          vm.items = vm.items.concat(data.list)
          vm.itemsPage = data.page
          vm.loading.items = false
        })
      }
      if (this.itemListCloseToBottom()) this.refreshItems(true)
    },
    markItemsRead: function() {
      var query = this.getItemsQuery()
```

@@ -439,6 +465,7 @@

```
        vm.items = []
        vm.itemsPage = {'cur': 1, 'num': 1}
        vm.itemSelected = null
        vm.itemsHasMore = false
        vm.refreshStats()
      })
    },
```

@@ -498,10 +525,7 @@

```
    deleteFolder: function(folder) {
      if (confirm('Are you sure you want to delete ' + folder.title + '?')) {
        api.folders.delete(folder.id).then(function() {
          if (vm.feedSelected === 'folder:'+folder.id) {
            vm.items = []
            vm.feedSelected = ''
          }
          vm.feedSelected = null
          vm.refreshStats()
          vm.refreshFeeds()
        })
```

@@ -518,12 +542,7 @@

```
    deleteFeed: function(feed) {
      if (confirm('Are you sure you want to delete ' + feed.title + '?')) {
        api.feeds.delete(feed.id).then(function() {
          // unselect feed to prevent reading properties of null in template
          var isSelected = !vm.feedSelected
            || (vm.feedSelected === 'feed:'+feed.id
            || (feed.folder_id && vm.feedSelected === 'folder:'+feed.folder_id));
          if (isSelected) vm.feedSelected = null

          vm.feedSelected = null
          vm.refreshStats()
          vm.refreshFeeds()
        })
```

@@ -633,10 +652,7 @@

```
    fetchAllFeeds: function() {
      if (this.loading.feeds) return
      api.feeds.refresh().then(function() {
        // NOTE: this is hacky
        setTimeout(function() {
          vm.refreshStats()
        }, 1000)
        vm.refreshStats()
      })
    },
    computeStats: function() {
```

@@ -668,6 +684,7 @@

```
      this.filteredTotalStats = statsTotal
    },
  }

})

vm.$mount('#app')
```
@@ -165,6 +165,24 @@ var keybindings = {
|
||||
"3": shortcutFunctions.showAll,
|
||||
}
|
||||
|
||||
var codebindings = {
|
||||
"KeyO": shortcutFunctions.openItemLink,
|
||||
"KeyI": shortcutFunctions.toggleReadability,
|
||||
//"r": shortcutFunctions.toggleItemRead,
|
||||
//"KeyR": shortcutFunctions.markAllRead,
|
||||
"KeyS": shortcutFunctions.toggleItemStarred,
|
||||
"Slash": shortcutFunctions.focusSearch,
|
||||
"KeyJ": shortcutFunctions.nextItem,
|
||||
"KeyK": shortcutFunctions.previousItem,
|
||||
"KeyL": shortcutFunctions.nextFeed,
|
||||
"KeyH": shortcutFunctions.previousFeed,
|
||||
"KeyF": shortcutFunctions.scrollForward,
|
||||
"KeyB": shortcutFunctions.scrollBackward,
|
||||
"Digit1": shortcutFunctions.showUnread,
|
||||
"Digit2": shortcutFunctions.showStarred,
|
||||
"Digit3": shortcutFunctions.showAll,
|
||||
}
|
||||
|
||||
function isTextBox(element) {
|
||||
var tagName = element.tagName.toLowerCase()
|
||||
// Input elements that aren't text
|
||||
@@ -179,10 +197,10 @@ function isTextBox(element) {
|
||||
document.addEventListener('keydown',function(event) {
|
||||
// Ignore while focused on text or
|
||||
// when using modifier keys (to not clash with browser behaviour)
|
||||
if (isTextBox(event.target) || event.metaKey || event.ctrlKey) {
|
||||
if (isTextBox(event.target) || event.metaKey || event.ctrlKey || event.altKey) {
|
||||
return
|
||||
}
|
||||
var keybindFunction = keybindings[event.key]
|
||||
var keybindFunction = keybindings[event.key] || codebindings[event.code]
|
||||
if (keybindFunction) {
|
||||
event.preventDefault()
|
||||
keybindFunction()
|
||||
|
@@ -5,7 +5,8 @@
|
||||
<title>yarr!</title>
|
||||
<link rel="stylesheet" href="./static/stylesheets/bootstrap.min.css">
|
||||
<link rel="stylesheet" href="./static/stylesheets/app.css">
|
||||
<link rel="icon shortcut" href="./static/graphicarts/icon.png">
|
||||
<link rel="icon" href="./static/graphicarts/favicon.svg" type="image/svg+xml">
|
||||
<link rel="alternate icon" href="./static/graphicarts/favicon.png" type="image/png">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||
<style>
|
||||
form {
|
||||
@@ -30,7 +31,7 @@
|
||||
<div class="form-group">
|
||||
<label for="username">Username</label>
|
||||
<input name="username" class="form-control" id="username" autocomplete="off"
|
||||
value="{% if .username %}{% .username %}{% end %}" required>
|
||||
value="{% if .username %}{% .username %}{% end %}" required autofocus>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="password">Password</label>
|
||||
|
@@ -2,8 +2,11 @@
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
body {
|
||||
html {
|
||||
font-size: 15px !important;
|
||||
}
|
||||
|
||||
body {
|
||||
overscroll-behavior: none;
|
||||
}
|
||||
|
||||
@@ -85,6 +88,10 @@ select.form-control:not([multiple]):not([size]) {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.table-compact {
|
||||
color: unset !important;
|
||||
}
|
||||
|
||||
.table-compact tr td:first-child {
|
||||
padding-left: 0;
|
||||
}
|
||||
@@ -160,7 +167,9 @@ select.form-control:not([multiple]):not([size]) {
|
||||
opacity: 0;
|
||||
position: absolute;
|
||||
z-index: -1;
|
||||
top: 0; left: 0;
|
||||
top: 0;
|
||||
left: 0;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.selectgroup + .selectgroup {
|
||||
@@ -349,6 +358,11 @@ select.form-control:not([multiple]):not([size]) {
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
.content-wrapper {
|
||||
max-width: 60rem;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
.content img, .content video {
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
@@ -360,6 +374,27 @@ select.form-control:not([multiple]):not([size]) {
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.content .video-wrapper {
|
||||
position: relative;
|
||||
display: block;
|
||||
width: 100%;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.content .video-wrapper::before {
|
||||
display: block;
|
||||
padding-top: 56.25%; /* 16x9 aspect ratio */
|
||||
content: "";
|
||||
}
|
||||
|
||||
.content .video-wrapper iframe {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.content pre {
|
||||
overflow-x: auto;
|
||||
color: inherit;
|
||||
@@ -395,6 +430,11 @@ select.form-control:not([multiple]):not([size]) {
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.content p {
|
||||
margin-top: 1rem;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
/* theme: light */
|
||||
|
||||
button.theme-light {
|
||||
@@ -402,11 +442,11 @@ button.theme-light {
|
||||
}
|
||||
|
||||
a,
|
||||
.btn-link:hover,
|
||||
.toolbar-item.active {
|
||||
.btn-link:hover {
|
||||
color: #0080d4;
|
||||
}
|
||||
|
||||
.toolbar-item.active,
|
||||
.dropdown-item.active,
|
||||
.dropdown-item:active,
|
||||
.selectgroup input:checked + .selectgroup-label {
|
||||
@@ -485,6 +525,46 @@ a,
|
||||
margin: 0 !important;
|
||||
}
|
||||
|
||||
/* Beautify the scroll bar. */
|
||||
:root {
|
||||
--custom-thumb-color: #6c757d;
|
||||
--custom-track-color: rgba(0, 0, 0, 0);
|
||||
--custom-width: thin;
|
||||
--custom-thumb-color-hover: #ef4c4c;
|
||||
--custom-track-color-hover: rgba(0, 0, 0, 0);
|
||||
--webkit-scrollbar-width-height: 7px;
|
||||
--webkit-scrollbar-border-radius: 6px;
|
||||
--workaround-gh-scrollbars: 0;
|
||||
}
|
||||
*:not(select) {
|
||||
scrollbar-color: var(--custom-thumb-color) var(--custom-track-color) !important;
|
||||
scrollbar-width: var(--custom-width) !important;
|
||||
}
|
||||
/* Chrome and derivatives*/
|
||||
::-webkit-scrollbar {
|
||||
max-width: var(--webkit-scrollbar-width-height) !important;
|
||||
max-height: var(--webkit-scrollbar-width-height) !important;
|
||||
background: var(--custom-track-color) !important;
|
||||
}
|
||||
::-webkit-scrollbar-corner,
|
||||
::-webkit-scrollbar-track,
|
||||
::-webkit-scrollbar-track-piece {
|
||||
background: var(--custom-track-color) !important;
|
||||
}
|
||||
::-webkit-scrollbar-thumb {
|
||||
background: var(--custom-thumb-color) !important;
|
||||
border-radius: var(--webkit-scrollbar-border-radius) !important;
|
||||
}
|
||||
::-webkit-scrollbar-corner:hover,
|
||||
::-webkit-scrollbar-track:hover,
|
||||
::-webkit-scrollbar-track-piece:hover {
|
||||
background: var(--custom-track-color-hover) !important;
|
||||
}
|
||||
::-webkit-scrollbar-thumb:hover {
|
||||
background: var(--custom-thumb-color-hover) !important;
|
||||
}
|
||||
|
||||
|
||||
/* responsive layout
|
||||
|
||||
tablet:
|
||||
@@ -567,4 +647,4 @@ a,
|
||||
.toolbar-search {
|
||||
padding: .5rem;
|
||||
}
|
||||
}
|
||||
}
|
@@ -2,6 +2,7 @@ package htmlutil

import (
"net/url"
"strings"
)

func Any(els []string, el string, match func(string, string) bool) bool {
@@ -31,3 +32,7 @@ func URLDomain(val string) string {
}
return val
}

func IsAPossibleLink(val string) bool {
return strings.HasPrefix(val, "http://") || strings.HasPrefix(val, "https://")
}
@@ -6,6 +6,7 @@ package readability
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
@@ -59,6 +60,9 @@ func ExtractContent(page io.Reader) (string, error) {
|
||||
best = body
|
||||
break
|
||||
}
|
||||
if best == nil {
|
||||
return "", errors.New("failed to extract content")
|
||||
}
|
||||
}
|
||||
//log.Printf("[Readability] TopCandidate: %v", topCandidate)
|
||||
|
||||
|
@@ -58,19 +58,36 @@ func Sanitize(baseURL, input string) string {
|
||||
attrNames, htmlAttributes := sanitizeAttributes(baseURL, tagName, token.Attr)
|
||||
|
||||
if hasRequiredAttributes(tagName, attrNames) {
|
||||
wrap := isVideoIframe(token)
|
||||
if wrap {
|
||||
buffer.WriteString(`<div class="video-wrapper">`)
|
||||
}
|
||||
|
||||
if len(attrNames) > 0 {
|
||||
buffer.WriteString("<" + tagName + " " + htmlAttributes + ">")
|
||||
} else {
|
||||
buffer.WriteString("<" + tagName + ">")
|
||||
}
|
||||
|
||||
tagStack = append(tagStack, tagName)
|
||||
if tagName == "iframe" {
|
||||
// autoclose iframes
|
||||
buffer.WriteString("</iframe>")
|
||||
if wrap {
|
||||
buffer.WriteString("</div>")
|
||||
}
|
||||
} else {
|
||||
tagStack = append(tagStack, tagName)
|
||||
}
|
||||
}
|
||||
} else if isBlockedTag(tagName) {
|
||||
blacklistedTagDepth++
|
||||
}
|
||||
case html.EndTagToken:
|
||||
tagName := token.Data
|
||||
// iframes are autoclosed. see above
|
||||
if tagName == "iframe" {
|
||||
continue
|
||||
}
|
||||
if isValidTag(tagName) && inList(tagName, tagStack) {
|
||||
buffer.WriteString(fmt.Sprintf("</%s>", tagName))
|
||||
} else if isBlockedTag(tagName) {
|
||||
@@ -347,7 +364,6 @@ func isBlockedTag(tagName string) bool {
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
One or more strings separated by commas, indicating possible image sources for the user agent to use.
|
||||
|
||||
Each string is composed of:
|
||||
@@ -355,7 +371,6 @@ Each string is composed of:
|
||||
- Optionally, whitespace followed by one of:
|
||||
- A width descriptor (a positive integer directly followed by w). The width descriptor is divided by the source size given in the sizes attribute to calculate the effective pixel density.
|
||||
- A pixel density descriptor (a positive floating point number directly followed by x).
|
||||
|
||||
*/
|
||||
func sanitizeSrcsetAttr(baseURL, value string) string {
|
||||
var sanitizedSources []string
|
||||
@@ -417,3 +432,22 @@ func isValidDataAttribute(value string) bool {
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isVideoIframe(token html.Token) bool {
|
||||
videoWhitelist := map[string]bool{
|
||||
"player.bilibili.com": true,
|
||||
"player.vimeo.com": true,
|
||||
"www.dailymotion.com": true,
|
||||
"www.youtube-nocookie.com": true,
|
||||
"www.youtube.com": true,
|
||||
}
|
||||
if token.Data == "iframe" {
|
||||
for _, attr := range token.Attr {
|
||||
if attr.Key == "src" {
|
||||
domain := htmlutil.URLDomain(attr.Val)
|
||||
return videoWhitelist[domain]
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
@@ -163,6 +163,16 @@ func TestInvalidNestedTag(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidIFrame(t *testing.T) {
|
||||
input := `<iframe src="http://example.org/"></iframe>`
|
||||
expected := `<iframe src="http://example.org/" sandbox="allow-scripts allow-same-origin allow-popups" loading="lazy"></iframe>`
|
||||
output := Sanitize("http://example.org/", input)
|
||||
|
||||
if expected != output {
|
||||
t.Errorf("Wrong output:\nwant: %s\nhave: %s", expected, output)
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidIFrame(t *testing.T) {
|
||||
input := `<iframe src="http://example.org/"></iframe>`
|
||||
expected := ``
|
||||
@@ -175,7 +185,7 @@ func TestInvalidIFrame(t *testing.T) {
|
||||
|
||||
func TestIFrameWithChildElements(t *testing.T) {
|
||||
input := `<iframe src="https://www.youtube.com/"><p>test</p></iframe>`
|
||||
expected := `<iframe src="https://www.youtube.com/" sandbox="allow-scripts allow-same-origin allow-popups" loading="lazy"></iframe>`
|
||||
expected := `<div class="video-wrapper"><iframe src="https://www.youtube.com/" sandbox="allow-scripts allow-same-origin allow-popups" loading="lazy"></iframe></div>`
|
||||
output := Sanitize("http://example.com/", input)
|
||||
|
||||
if expected != output {
|
||||
@@ -255,7 +265,7 @@ func TestEspaceAttributes(t *testing.T) {
|
||||
|
||||
func TestReplaceIframeURL(t *testing.T) {
|
||||
input := `<iframe src="https://player.vimeo.com/video/123456?title=0&byline=0"></iframe>`
|
||||
expected := `<iframe src="https://player.vimeo.com/video/123456?title=0&byline=0" sandbox="allow-scripts allow-same-origin allow-popups" loading="lazy"></iframe>`
|
||||
expected := `<div class="video-wrapper"><iframe src="https://player.vimeo.com/video/123456?title=0&byline=0" sandbox="allow-scripts allow-same-origin allow-popups" loading="lazy"></iframe></div>`
|
||||
output := Sanitize("http://example.org/", input)
|
||||
|
||||
if expected != output {
|
||||
@@ -292,3 +302,13 @@ func TestReplaceStyle(t *testing.T) {
|
||||
t.Errorf(`Wrong output: "%s" != "%s"`, expected, output)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrapYoutubeIFrames(t *testing.T) {
|
||||
input := `<iframe src="https://www.youtube.com/embed/foobar"></iframe>`
|
||||
expected := `<div class="video-wrapper"><iframe src="https://www.youtube.com/embed/foobar" sandbox="allow-scripts allow-same-origin allow-popups" loading="lazy"></iframe></div>`
|
||||
output := Sanitize("http://example.org/", input)
|
||||
|
||||
if expected != output {
|
||||
t.Errorf("Wrong output:\nwant: %v\nhave: %v", expected, output)
|
||||
}
|
||||
}
|
||||
|
17
src/content/silo/url.go
Normal file
@@ -0,0 +1,17 @@
package silo

import (
"net/url"
"strings"
)

func RedirectURL(link string) string {
if strings.HasPrefix(link, "https://www.google.com/url?") {
if u, err := url.Parse(link); err == nil {
if u2 := u.Query().Get("url"); u2 != "" {
return u2
}
}
}
return link
}
24
src/content/silo/url_test.go
Normal file
@@ -0,0 +1,24 @@
package silo

import "testing"

func TestRedirectURL(t *testing.T) {
link := "https://www.google.com/url?rct=j&sa=t&url=https://www.cryptoglobe.com/latest/2022/08/investment-strategist-lyn-alden-explains-why-she-is-still-bullish-on-bitcoin-long-term/&ct=ga&cd=CAIyGjlkMjI1NjUyODE3ODFjMDQ6Y29tOmVuOlVT&usg=AOvVaw16C2fJtw6m8QVEbto2HCKK"
want := "https://www.cryptoglobe.com/latest/2022/08/investment-strategist-lyn-alden-explains-why-she-is-still-bullish-on-bitcoin-long-term/"
have := RedirectURL(link)
if have != want {
t.Logf("want: %s", want)
t.Logf("have: %s", have)
t.Fail()
}

link = "https://example.com"
if RedirectURL(link) != link {
t.Fail()
}

link = "https://example.com/url?url=test.com"
if RedirectURL(link) != link {
t.Fail()
}
}
106
src/main.go
@@ -1,106 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/nkanaev/yarr/src/platform"
|
||||
"github.com/nkanaev/yarr/src/server"
|
||||
"github.com/nkanaev/yarr/src/storage"
|
||||
)
|
||||
|
||||
var Version string = "0.0"
|
||||
var GitHash string = "unknown"
|
||||
|
||||
func main() {
|
||||
log.SetOutput(os.Stdout)
|
||||
log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile)
|
||||
|
||||
var addr, db, authfile, certfile, keyfile, basepath string
|
||||
var ver, open bool
|
||||
flag.StringVar(&addr, "addr", "127.0.0.1:7070", "address to run server on")
|
||||
flag.StringVar(&authfile, "auth-file", "", "path to a file containing username:password")
|
||||
flag.StringVar(&basepath, "base", "", "base path of the service url")
|
||||
flag.StringVar(&certfile, "cert-file", "", "path to cert file for https")
|
||||
flag.StringVar(&keyfile, "key-file", "", "path to key file for https")
|
||||
flag.StringVar(&db, "db", "", "storage file path")
|
||||
flag.BoolVar(&ver, "version", false, "print application version")
|
||||
flag.BoolVar(&open, "open", false, "open the server in browser")
|
||||
flag.Parse()
|
||||
|
||||
if ver {
|
||||
fmt.Printf("v%s (%s)\n", Version, GitHash)
|
||||
return
|
||||
}
|
||||
|
||||
configPath, err := os.UserConfigDir()
|
||||
if err != nil {
|
||||
log.Fatal("Failed to get config dir: ", err)
|
||||
}
|
||||
|
||||
if db == "" {
|
||||
storagePath := filepath.Join(configPath, "yarr")
|
||||
if err := os.MkdirAll(storagePath, 0755); err != nil {
|
||||
log.Fatal("Failed to create app config dir: ", err)
|
||||
}
|
||||
db = filepath.Join(storagePath, "storage.db")
|
||||
}
|
||||
|
||||
log.Printf("using db file %s", db)
|
||||
|
||||
var username, password string
|
||||
if authfile != "" {
|
||||
f, err := os.Open(authfile)
|
||||
if err != nil {
|
||||
log.Fatal("Failed to open auth file: ", err)
|
||||
}
|
||||
defer f.Close()
|
||||
scanner := bufio.NewScanner(f)
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
parts := strings.Split(line, ":")
|
||||
if len(parts) != 2 {
|
||||
log.Fatalf("Invalid auth: %v (expected `username:password`)", line)
|
||||
}
|
||||
username = parts[0]
|
||||
password = parts[1]
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (certfile != "" || keyfile != "") && (certfile == "" || keyfile == "") {
|
||||
log.Fatalf("Both cert & key files are required")
|
||||
}
|
||||
|
||||
store, err := storage.New(db)
|
||||
if err != nil {
|
||||
log.Fatal("Failed to initialise database: ", err)
|
||||
}
|
||||
|
||||
srv := server.NewServer(store, addr)
|
||||
|
||||
if basepath != "" {
|
||||
srv.BasePath = "/" + strings.Trim(basepath, "/")
|
||||
}
|
||||
|
||||
if certfile != "" && keyfile != "" {
|
||||
srv.CertFile = certfile
|
||||
srv.KeyFile = keyfile
|
||||
}
|
||||
|
||||
if username != "" && password != "" {
|
||||
srv.Username = username
|
||||
srv.Password = password
|
||||
}
|
||||
|
||||
log.Printf("starting server at %s", srv.GetAddr())
|
||||
if open {
|
||||
platform.Open(srv.GetAddr())
|
||||
}
|
||||
platform.Start(srv)
|
||||
}
|
@@ -47,6 +47,8 @@ type atomLinks []atomLink
|
||||
func (a *atomText) Text() string {
|
||||
if a.Type == "html" {
|
||||
return htmlutil.ExtractText(a.Data)
|
||||
} else if a.Type == "xhtml" {
|
||||
return htmlutil.ExtractText(a.XML)
|
||||
}
|
||||
return a.Data
|
||||
}
|
||||
@@ -81,9 +83,16 @@ func ParseAtom(r io.Reader) (*Feed, error) {
|
||||
SiteURL: firstNonEmpty(srcfeed.Links.First("alternate"), srcfeed.Links.First("")),
|
||||
}
|
||||
for _, srcitem := range srcfeed.Entries {
|
||||
link := firstNonEmpty(srcitem.OrigLink, srcitem.Links.First("alternate"), srcitem.Links.First(""))
|
||||
linkFromID := ""
|
||||
guidFromID := ""
|
||||
if htmlutil.IsAPossibleLink(srcitem.ID) {
|
||||
linkFromID = srcitem.ID
|
||||
guidFromID = srcitem.ID + "::" + srcitem.Updated
|
||||
}
|
||||
|
||||
link := firstNonEmpty(srcitem.OrigLink, srcitem.Links.First("alternate"), srcitem.Links.First(""), linkFromID)
|
||||
dstfeed.Items = append(dstfeed.Items, Item{
|
||||
GUID: firstNonEmpty(srcitem.ID, link),
|
||||
GUID: firstNonEmpty(guidFromID, srcitem.ID, link),
|
||||
Date: dateParse(firstNonEmpty(srcitem.Published, srcitem.Updated)),
|
||||
URL: link,
|
||||
Title: srcitem.Title.Text(),
|
||||
|
@@ -94,6 +94,44 @@ func TestAtomHTMLTitle(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestAtomXHTMLTitle(t *testing.T) {
|
||||
feed, _ := Parse(strings.NewReader(`
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<feed xmlns="http://www.w3.org/2005/Atom">
|
||||
<entry><title type="xhtml">say <code>what</code>?</entry>
|
||||
</feed>
|
||||
`))
|
||||
have := feed.Items[0].Title
|
||||
want := "say what?"
|
||||
if !reflect.DeepEqual(want, have) {
|
||||
t.Logf("want: %#v", want)
|
||||
t.Logf("have: %#v", have)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestAtomXHTMLNestedTitle(t *testing.T) {
|
||||
feed, _ := Parse(strings.NewReader(`
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<feed xmlns="http://www.w3.org/2005/Atom">
|
||||
<entry>
|
||||
<title type="xhtml">
|
||||
<div xmlns="http://www.w3.org/1999/xhtml">
|
||||
<a href="https://example.com">Link to Example</a>
|
||||
</div>
|
||||
</title>
|
||||
</entry>
|
||||
</feed>
|
||||
`))
|
||||
have := feed.Items[0].Title
|
||||
want := "Link to Example"
|
||||
if !reflect.DeepEqual(want, have) {
|
||||
t.Logf("want: %#v", want)
|
||||
t.Logf("have: %#v", have)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestAtomImageLink(t *testing.T) {
|
||||
feed, _ := Parse(strings.NewReader(`
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
@@ -131,3 +169,48 @@ func TestAtomImageLinkDuplicated(t *testing.T) {
|
||||
t.Fatal("item.image_url must be unset if present in the content")
|
||||
}
|
||||
}
|
||||
|
||||
func TestAtomLinkInID(t *testing.T) {
|
||||
feed, _ := Parse(strings.NewReader(`
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<feed xmlns="http://www.w3.org/2005/Atom" xmlns:media="http://search.yahoo.com/mrss/">
|
||||
<entry>
|
||||
<title>one updated</title>
|
||||
<id>https://example.com/posts/1</id>
|
||||
<updated>2003-12-13T09:17:51</updated>
|
||||
</entry>
|
||||
<entry>
|
||||
<title>two</title>
|
||||
<id>urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6</id>
|
||||
</entry>
|
||||
<entry>
|
||||
<title>one</title>
|
||||
<id>https://example.com/posts/1</id>
|
||||
</entry>
|
||||
</feed>
|
||||
`))
|
||||
have := feed.Items
|
||||
want := []Item{
|
||||
Item{
|
||||
GUID: "https://example.com/posts/1::2003-12-13T09:17:51",
|
||||
Date: time.Date(2003, time.December, 13, 9, 17, 51, 0, time.UTC),
|
||||
URL: "https://example.com/posts/1",
|
||||
Title: "one updated",
|
||||
},
|
||||
Item{
|
||||
GUID: "urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6",
|
||||
Date: time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), URL: "",
|
||||
Title: "two",
|
||||
},
|
||||
Item{
|
||||
GUID: "https://example.com/posts/1::",
|
||||
Date: time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC),
|
||||
URL: "https://example.com/posts/1",
|
||||
Title: "one",
|
||||
Content: "",
|
||||
},
|
||||
}
|
||||
if !reflect.DeepEqual(want, have) {
|
||||
t.Fatalf("\nwant: %#v\nhave: %#v\n", want, have)
|
||||
}
|
||||
}
|
||||
|
@@ -9,15 +9,27 @@ import (
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/nkanaev/yarr/src/content/htmlutil"
|
||||
"golang.org/x/net/html/charset"
|
||||
)
|
||||
|
||||
var UnknownFormat = errors.New("unknown feed format")
|
||||
|
||||
type processor func(r io.Reader) (*Feed, error)
|
||||
type feedProbe struct {
|
||||
feedType string
|
||||
callback func(r io.Reader) (*Feed, error)
|
||||
encoding string
|
||||
}
|
||||
|
||||
func sniff(lookup string) (string, processor) {
|
||||
func sniff(lookup string) (out feedProbe) {
|
||||
lookup = strings.TrimSpace(lookup)
|
||||
lookup = strings.TrimLeft(lookup, "\x00\xEF\xBB\xBF\xFE\xFF")
|
||||
|
||||
if len(lookup) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
switch lookup[0] {
|
||||
case '<':
|
||||
decoder := xmlDecoder(strings.NewReader(lookup))
|
||||
@@ -26,24 +38,42 @@ func sniff(lookup string) (string, processor) {
|
||||
if token == nil {
|
||||
break
|
||||
}
|
||||
|
||||
// check <?xml encoding="ENCODING" ?>
|
||||
if el, ok := token.(xml.ProcInst); ok && el.Target == "xml" {
|
||||
out.encoding = strings.ToLower(procInst("encoding", string(el.Inst)))
|
||||
}
|
||||
|
||||
if el, ok := token.(xml.StartElement); ok {
|
||||
switch el.Name.Local {
|
||||
case "rss":
|
||||
return "rss", ParseRSS
|
||||
out.feedType = "rss"
|
||||
out.callback = ParseRSS
|
||||
return
|
||||
case "RDF":
|
||||
return "rdf", ParseRDF
|
||||
out.feedType = "rdf"
|
||||
out.callback = ParseRDF
|
||||
return
|
||||
case "feed":
|
||||
return "atom", ParseAtom
|
||||
out.feedType = "atom"
|
||||
out.callback = ParseAtom
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
case '{':
|
||||
return "json", ParseJSON
|
||||
out.feedType = "json"
|
||||
out.callback = ParseJSON
|
||||
return
|
||||
}
|
||||
return "", nil
|
||||
return
|
||||
}
|
||||
|
||||
func Parse(r io.Reader) (*Feed, error) {
|
||||
return ParseWithEncoding(r, "")
|
||||
}
|
||||
|
||||
func ParseWithEncoding(r io.Reader, fallbackEncoding string) (*Feed, error) {
|
||||
lookup := make([]byte, 2048)
|
||||
n, err := io.ReadFull(r, lookup)
|
||||
switch {
|
||||
@@ -56,18 +86,42 @@ func Parse(r io.Reader) (*Feed, error) {
|
||||
r = io.MultiReader(bytes.NewReader(lookup), r)
|
||||
}
|
||||
|
||||
_, callback := sniff(string(lookup))
|
||||
if callback == nil {
|
||||
out := sniff(string(lookup))
|
||||
if out.feedType == "" {
|
||||
return nil, UnknownFormat
|
||||
}
|
||||
|
||||
feed, err := callback(r)
|
||||
if out.encoding == "" && fallbackEncoding != "" {
|
||||
r, err = charset.NewReaderLabel(fallbackEncoding, r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if (out.feedType != "json") && (out.encoding == "" || out.encoding == "utf-8") {
|
||||
// XML decoder will not rely on custom CharsetReader (see `xmlDecoder`)
|
||||
// to handle invalid xml characters.
|
||||
// Assume input is already UTF-8 and do the cleanup here.
|
||||
r = NewSafeXMLReader(r)
|
||||
}
|
||||
|
||||
feed, err := out.callback(r)
|
||||
if feed != nil {
|
||||
feed.cleanup()
|
||||
}
|
||||
return feed, err
|
||||
}
|
||||
|
||||
func ParseAndFix(r io.Reader, baseURL, fallbackEncoding string) (*Feed, error) {
|
||||
feed, err := ParseWithEncoding(r, fallbackEncoding)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
feed.TranslateURLs(baseURL)
|
||||
feed.SetMissingDatesTo(time.Now())
|
||||
return feed, nil
|
||||
}
|
||||
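ParseAndFix above takes a fallback encoding that is only applied when the XML declaration does not name one itself. A rough usage sketch (not part of this change set; the feed URL, the import path, and the idea of deriving the fallback from the HTTP Content-Type charset are assumptions):

```go
package main

import (
    "fmt"
    "net/http"

    "github.com/nkanaev/yarr/src/parser"
)

func main() {
    // Placeholder feed URL for illustration only.
    resp, err := http.Get("https://example.com/feed.xml")
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    // "windows-1251" stands in for a charset sniffed elsewhere (e.g. from
    // the Content-Type header); it is ignored when the <?xml ?> declaration
    // already names an encoding, and "" can be passed when none is known.
    feed, err := parser.ParseAndFix(resp.Body, "https://example.com", "windows-1251")
    if err != nil {
        panic(err)
    }
    fmt.Println(feed.Title)
}
```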
|
||||
func (feed *Feed) cleanup() {
|
||||
feed.Title = strings.TrimSpace(feed.Title)
|
||||
feed.SiteURL = strings.TrimSpace(feed.SiteURL)
|
||||
@@ -75,7 +129,7 @@ func (feed *Feed) cleanup() {
|
||||
for i, item := range feed.Items {
|
||||
feed.Items[i].GUID = strings.TrimSpace(item.GUID)
|
||||
feed.Items[i].URL = strings.TrimSpace(item.URL)
|
||||
feed.Items[i].Title = strings.TrimSpace(item.Title)
|
||||
feed.Items[i].Title = strings.TrimSpace(htmlutil.ExtractText(item.Title))
|
||||
feed.Items[i].Content = strings.TrimSpace(item.Content)
|
||||
|
||||
if item.ImageURL != "" && strings.Contains(item.Content, item.ImageURL) {
|
||||
|
@@ -7,38 +7,40 @@ import (
|
||||
)
|
||||
|
||||
func TestSniff(t *testing.T) {
|
||||
testcases := [][2]string{
|
||||
testcases := []struct {
|
||||
input string
|
||||
want feedProbe
|
||||
}{
|
||||
{
|
||||
`<?xml version="1.0"?><rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"></rdf:RDF>`,
|
||||
"rdf",
|
||||
feedProbe{feedType: "rdf", callback: ParseRDF},
|
||||
},
|
||||
{
|
||||
`<?xml version="1.0" encoding="ISO-8859-1"?><rss version="2.0"><channel></channel></rss>`,
|
||||
"rss",
|
||||
feedProbe{feedType: "rss", callback: ParseRSS, encoding: "iso-8859-1"},
|
||||
},
|
||||
{
|
||||
`<?xml version="1.0"?><rss version="2.0"><channel></channel></rss>`,
|
||||
"rss",
|
||||
feedProbe{feedType: "rss", callback: ParseRSS},
|
||||
},
|
||||
{
|
||||
`<?xml version="1.0" encoding="utf-8"?><feed xmlns="http://www.w3.org/2005/Atom"></feed>`,
|
||||
"atom",
|
||||
feedProbe{feedType: "atom", callback: ParseAtom, encoding: "utf-8"},
|
||||
},
|
||||
{
|
||||
`{}`,
|
||||
"json",
|
||||
feedProbe{feedType: "json", callback: ParseJSON},
|
||||
},
|
||||
{
|
||||
`<!DOCTYPE html><html><head><title></title></head><body></body></html>`,
|
||||
"",
|
||||
feedProbe{},
|
||||
},
|
||||
}
|
||||
for _, testcase := range testcases {
|
||||
have, _ := sniff(testcase[0])
|
||||
want := testcase[1]
|
||||
if want != have {
|
||||
t.Log(testcase[0])
|
||||
t.Errorf("Invalid format: want=%#v have=%#v", want, have)
|
||||
want := testcase.want
|
||||
have := sniff(testcase.input)
|
||||
if want.encoding != have.encoding || want.feedType != have.feedType {
|
||||
t.Errorf("Invalid output\n---\n%s\n---\n\nwant=%#v\nhave=%#v", testcase.input, want, have)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -107,3 +109,44 @@ func TestParseFeedWithBOM(t *testing.T) {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseCleanIllegalCharsInUTF8(t *testing.T) {
|
||||
data := `
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
|
||||
<channel>
|
||||
<item>
|
||||
<title>` + "\a" + `title</title>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
`
|
||||
feed, err := Parse(strings.NewReader(data))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(feed.Items) != 1 || feed.Items[0].Title != "title" {
|
||||
t.Fatalf("invalid feed, got: %v", feed)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseCleanIllegalCharsInNonUTF8(t *testing.T) {
|
||||
// echo привет | iconv -f utf8 -t cp1251 | hexdump -C
|
||||
data := `
|
||||
<?xml version="1.0" encoding="windows-1251"?>
|
||||
<rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
|
||||
<channel>
|
||||
<item>
|
||||
<title>` + "\a \xef\xf0\xe8\xe2\xe5\xf2\x0a \a" + `</title>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
`
|
||||
feed, err := Parse(strings.NewReader(data))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(feed.Items) != 1 || feed.Items[0].Title != "привет" {
|
||||
t.Fatalf("invalid feed, got: %v", feed)
|
||||
}
|
||||
}
|
||||
|
@@ -20,9 +20,9 @@ type rssFeed struct {
|
||||
}
|
||||
|
||||
type rssItem struct {
|
||||
GUID string `xml:"guid"`
|
||||
GUID rssGuid `xml:"guid"`
|
||||
Title string `xml:"title"`
|
||||
Link string `xml:"link"`
|
||||
Link string `xml:"rss link"`
|
||||
Description string `xml:"rss description"`
|
||||
PubDate string `xml:"pubDate"`
|
||||
Enclosures []rssEnclosure `xml:"enclosure"`
|
||||
@@ -36,6 +36,11 @@ type rssItem struct {
|
||||
media
|
||||
}
|
||||
|
||||
type rssGuid struct {
|
||||
GUID string `xml:",chardata"`
|
||||
IsPermaLink string `xml:"isPermaLink,attr"`
|
||||
}
|
||||
|
||||
type rssLink struct {
|
||||
XMLName xml.Name
|
||||
Data string `xml:",chardata"`
|
||||
@@ -71,7 +76,7 @@ func ParseRSS(r io.Reader) (*Feed, error) {
|
||||
for _, srcitem := range srcfeed.Items {
|
||||
podcastURL := ""
|
||||
for _, e := range srcitem.Enclosures {
|
||||
if e.Type == "audio/mpeg" || e.Type == "audio/x-m4a" {
|
||||
if strings.HasPrefix(e.Type, "audio/") {
|
||||
podcastURL = e.URL
|
||||
|
||||
if srcitem.OrigEnclosureLink != "" && strings.Contains(podcastURL, path.Base(srcitem.OrigEnclosureLink)) {
|
||||
@@ -81,10 +86,15 @@ func ParseRSS(r io.Reader) (*Feed, error) {
|
||||
}
|
||||
}
|
||||
|
||||
permalink := ""
|
||||
if srcitem.GUID.IsPermaLink == "true" {
|
||||
permalink = srcitem.GUID.GUID
|
||||
}
|
||||
|
||||
dstfeed.Items = append(dstfeed.Items, Item{
|
||||
GUID: firstNonEmpty(srcitem.GUID, srcitem.Link),
|
||||
GUID: firstNonEmpty(srcitem.GUID.GUID, srcitem.Link),
|
||||
Date: dateParse(firstNonEmpty(srcitem.DublinCoreDate, srcitem.PubDate)),
|
||||
URL: firstNonEmpty(srcitem.OrigLink, srcitem.Link),
|
||||
URL: firstNonEmpty(srcitem.OrigLink, srcitem.Link, permalink),
|
||||
Title: srcitem.Title,
|
||||
Content: firstNonEmpty(srcitem.ContentEncoded, srcitem.Description),
|
||||
AudioURL: podcastURL,
|
||||
|
@@ -136,6 +136,26 @@ func TestRSSPodcast(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestRSSOpusPodcast(t *testing.T) {
|
||||
feed, _ := Parse(strings.NewReader(`
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0">
|
||||
<channel>
|
||||
<item>
|
||||
<enclosure length="100500" type="audio/opus" url="http://example.com/audio.ext"/>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
`))
|
||||
have := feed.Items[0].AudioURL
|
||||
want := "http://example.com/audio.ext"
|
||||
if want != have {
|
||||
t.Logf("want: %#v", want)
|
||||
t.Logf("have: %#v", have)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
// found in: https://podcast.cscript.site/podcast.xml
|
||||
func TestRSSPodcastDuplicated(t *testing.T) {
|
||||
feed, _ := Parse(strings.NewReader(`
|
||||
@@ -160,3 +180,51 @@ func TestRSSPodcastDuplicated(t *testing.T) {
|
||||
t.Fatal("item.audio_url must be unset if present in the content")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRSSTitleHTMLTags(t *testing.T) {
|
||||
feed, _ := Parse(strings.NewReader(`
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
|
||||
<channel>
|
||||
<item>
|
||||
<title><p>title in p</p></title>
|
||||
</item>
|
||||
<item>
|
||||
<title>very <strong>strong</strong> title</title>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
`))
|
||||
have := []string{feed.Items[0].Title, feed.Items[1].Title}
|
||||
want := []string{"title in p", "very strong title"}
|
||||
for i := 0; i < len(want); i++ {
|
||||
if want[i] != have[i] {
|
||||
t.Errorf("title doesn't match\nwant: %#v\nhave: %#v\n", want[i], have[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestRSSIsPermalink(t *testing.T) {
|
||||
feed, _ := Parse(strings.NewReader(`
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
|
||||
<channel>
|
||||
<item>
|
||||
<guid isPermaLink="true">http://example.com/posts/1</guid>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
`))
|
||||
have := feed.Items
|
||||
want := []Item{
|
||||
{
|
||||
GUID: "http://example.com/posts/1",
|
||||
URL: "http://example.com/posts/1",
|
||||
},
|
||||
}
|
||||
for i := 0; i < len(want); i++ {
|
||||
if want[i] != have[i] {
|
||||
t.Errorf("Failed to handle isPermalink\nwant: %#v\nhave: %#v\n", want[i], have[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,6 +1,8 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/xml"
|
||||
"io"
|
||||
"regexp"
|
||||
@@ -30,6 +32,81 @@ func plain2html(text string) string {
|
||||
func xmlDecoder(r io.Reader) *xml.Decoder {
|
||||
decoder := xml.NewDecoder(r)
|
||||
decoder.Strict = false
|
||||
decoder.CharsetReader = charset.NewReaderLabel
|
||||
decoder.CharsetReader = func(cs string, input io.Reader) (io.Reader, error) {
|
||||
r, err := charset.NewReaderLabel(cs, input)
|
||||
if err == nil {
|
||||
r = NewSafeXMLReader(r)
|
||||
}
|
||||
return r, err
|
||||
}
|
||||
return decoder
|
||||
}
|
||||
|
||||
type safexmlreader struct {
|
||||
reader *bufio.Reader
|
||||
buffer *bytes.Buffer
|
||||
}
|
||||
|
||||
func NewSafeXMLReader(r io.Reader) io.Reader {
|
||||
return &safexmlreader{
|
||||
reader: bufio.NewReader(r),
|
||||
buffer: bytes.NewBuffer(make([]byte, 0, 4096)),
|
||||
}
|
||||
}
|
||||
|
||||
func (xr *safexmlreader) Read(p []byte) (int, error) {
|
||||
for xr.buffer.Len() < cap(p) {
|
||||
r, _, err := xr.reader.ReadRune()
|
||||
if err == io.EOF {
|
||||
if xr.buffer.Len() == 0 {
|
||||
return 0, io.EOF
|
||||
}
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if isInCharacterRange(r) {
|
||||
xr.buffer.WriteRune(r)
|
||||
}
|
||||
}
|
||||
return xr.buffer.Read(p)
|
||||
}
|
||||
|
||||
// NOTE: copied from "encoding/xml" package
|
||||
// Decide whether the given rune is in the XML Character Range, per
|
||||
// the Char production of https://www.xml.com/axml/testaxml.htm,
|
||||
// Section 2.2 Characters.
|
||||
func isInCharacterRange(r rune) (inrange bool) {
|
||||
return r == 0x09 ||
|
||||
r == 0x0A ||
|
||||
r == 0x0D ||
|
||||
r >= 0x20 && r <= 0xD7FF ||
|
||||
r >= 0xE000 && r <= 0xFFFD ||
|
||||
r >= 0x10000 && r <= 0x10FFFF
|
||||
}
|
||||
|
||||
// NOTE: copied from "encoding/xml" package
|
||||
// procInst parses the `param="..."` or `param='...'`
|
||||
// value out of the provided string, returning "" if not found.
|
||||
func procInst(param, s string) string {
|
||||
// TODO: this parsing is somewhat lame and not exact.
|
||||
// It works for all actual cases, though.
|
||||
param = param + "="
|
||||
idx := strings.Index(s, param)
|
||||
if idx == -1 {
|
||||
return ""
|
||||
}
|
||||
v := s[idx+len(param):]
|
||||
if v == "" {
|
||||
return ""
|
||||
}
|
||||
if v[0] != '\'' && v[0] != '"' {
|
||||
return ""
|
||||
}
|
||||
idx = strings.IndexRune(v[1:], rune(v[0]))
|
||||
if idx == -1 {
|
||||
return ""
|
||||
}
|
||||
return v[1 : idx+1]
|
||||
}
|
||||
|
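procInst is what sniff uses to read the encoding attribute out of the `<?xml ... ?>` declaration. A small illustrative check, assuming it sits alongside the function in the parser package (it is not part of the original change set):

```go
package parser

import "testing"

// TestProcInstEncoding illustrates procInst: it pulls a param="value" pair
// out of the body of an XML processing instruction and returns "" when the
// parameter is absent.
func TestProcInstEncoding(t *testing.T) {
    inst := `version="1.0" encoding="windows-1251"`
    if have := procInst("encoding", inst); have != "windows-1251" {
        t.Fatalf("want windows-1251, have %q", have)
    }
    if have := procInst("standalone", inst); have != "" {
        t.Fatalf("want empty string for a missing param, have %q", have)
    }
}
```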
88
src/parser/util_test.go
Normal file
@@ -0,0 +1,88 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSafeXMLReader(t *testing.T) {
|
||||
var f io.Reader
|
||||
want := []byte("привет мир")
|
||||
f = bytes.NewReader(want)
|
||||
f = NewSafeXMLReader(f)
|
||||
|
||||
have, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(want, have) {
|
||||
t.Fatalf("invalid output\nwant: %v\nhave: %v", want, have)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSafeXMLReaderRemoveUnwantedRunes(t *testing.T) {
|
||||
var f io.Reader
|
||||
input := []byte("\aпривет \x0cмир\ufffe\uffff")
|
||||
want := []byte("привет мир")
|
||||
f = bytes.NewReader(input)
|
||||
f = NewSafeXMLReader(f)
|
||||
|
||||
have, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(want, have) {
|
||||
t.Fatalf("invalid output\nwant: %v\nhave: %v", want, have)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSafeXMLReaderPartial1(t *testing.T) {
|
||||
var f io.Reader
|
||||
input := []byte("\aпривет \x0cмир\ufffe\uffff")
|
||||
want := []byte("привет мир")
|
||||
f = bytes.NewReader(input)
|
||||
f = NewSafeXMLReader(f)
|
||||
|
||||
buf := make([]byte, 1)
|
||||
for i := 0; i < len(want); i++ {
|
||||
n, err := f.Read(buf)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if n != 1 {
|
||||
t.Fatalf("expected 1 byte, got %d", n)
|
||||
}
|
||||
if buf[0] != want[i] {
|
||||
t.Fatalf("invalid char at pos %d\nwant: %v\nhave: %v", i, want[i], buf[0])
|
||||
}
|
||||
}
|
||||
if x, err := f.Read(buf); err != io.EOF {
|
||||
t.Fatalf("expected EOF, %v, %v %v", buf, x, err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSafeXMLReaderPartial2(t *testing.T) {
|
||||
var f io.Reader
|
||||
input := []byte("привет\a\a\a\a\a")
|
||||
f = bytes.NewReader(input)
|
||||
f = NewSafeXMLReader(f)
|
||||
|
||||
buf := make([]byte, 12)
|
||||
n, err := f.Read(buf)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
if n != 12 {
|
||||
t.Fatalf("expected 12 bytes")
|
||||
}
|
||||
|
||||
n, err = f.Read(buf)
|
||||
if n != 0 {
|
||||
t.Fatalf("expected 0")
|
||||
}
|
||||
if err != io.EOF {
|
||||
t.Fatalf("expected EOF, got %v", err)
|
||||
}
|
||||
}
|
15
src/platform/fixconsole_default.go
Normal file
@@ -0,0 +1,15 @@
//go:build !windows
// +build !windows

package platform

// On non-windows platforms, we don't need to do anything. The console
// starts off attached already, if it exists.

func AttachConsole() error {
return nil
}

func FixConsoleIfNeeded() error {
return nil
}
134
src/platform/fixconsole_windows.go
Normal file
@@ -0,0 +1,134 @@
|
||||
package platform
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"golang.org/x/sys/windows"
|
||||
"os"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
func AttachConsole() error {
|
||||
const ATTACH_PARENT_PROCESS = ^uintptr(0)
|
||||
proc := syscall.MustLoadDLL("kernel32.dll").MustFindProc("AttachConsole")
|
||||
r1, _, err := proc.Call(ATTACH_PARENT_PROCESS)
|
||||
if r1 == 0 {
|
||||
errno, ok := err.(syscall.Errno)
|
||||
if ok && errno == windows.ERROR_INVALID_HANDLE {
|
||||
// console handle doesn't exist; not a real
|
||||
// error, but the console handle will be
|
||||
// invalid.
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var oldStdin, oldStdout, oldStderr *os.File
|
||||
|
||||
// Windows console output is a mess.
|
||||
//
|
||||
// If you compile as "-H windows", then if you launch your program without
|
||||
// a console, Windows forcibly creates one to use as your stdin/stdout, which
|
||||
// is silly for a GUI app, so we can't do that.
|
||||
//
|
||||
// If you compile as "-H windowsgui", then it doesn't create a console for
|
||||
// your app... but also doesn't provide a working stdin/stdout/stderr even if
|
||||
// you *did* launch from the console. However, you can use AttachConsole()
|
||||
// to get a handle to your parent process's console, if any, and then
|
||||
// os.NewFile() to turn that handle into a fd usable as stdout/stderr.
|
||||
//
|
||||
// However, then you have the problem that if you redirect stdout or stderr
|
||||
// from the shell, you end up ignoring the redirection by forcing it to the
|
||||
// console.
|
||||
//
|
||||
// To fix *that*, we have to detect whether there was a pre-existing stdout
|
||||
// or not. We can check GetStdHandle(), which returns 0 for "should be
|
||||
// console" and nonzero for "already pointing at a file."
|
||||
//
|
||||
// Be careful though! As soon as you run AttachConsole(), it resets *all*
|
||||
// the GetStdHandle() handles to point them at the console instead, thus
|
||||
// throwing away the original file redirects. So we have to GetStdHandle()
|
||||
// *before* AttachConsole().
|
||||
//
|
||||
// For some reason, powershell redirections provide a valid file handle, but
|
||||
// writing to that handle doesn't write to the file. I haven't found a way
|
||||
// to work around that. (Windows 10.0.17763.379)
|
||||
//
|
||||
// Net result is as follows.
|
||||
// Before:
|
||||
//
|
||||
// SHELL NON-REDIRECTED REDIRECTED
|
||||
// explorer.exe no console n/a
|
||||
// cmd.exe broken works
|
||||
// powershell broken broken
|
||||
// WSL bash broken works
|
||||
//
|
||||
// After
|
||||
//
|
||||
// SHELL NON-REDIRECTED REDIRECTED
|
||||
// explorer.exe no console n/a
|
||||
// cmd.exe works works
|
||||
// powershell works broken
|
||||
// WSL bash works works
|
||||
//
|
||||
// We don't seem to make anything worse, at least.
|
||||
func FixConsoleIfNeeded() error {
|
||||
// Retain the original console objects, to prevent Go from automatically
|
||||
// closing their file descriptors when they get garbage collected.
|
||||
// You never want to close file descriptors 0, 1, and 2.
|
||||
oldStdin, oldStdout, oldStderr = os.Stdin, os.Stdout, os.Stderr
|
||||
|
||||
stdin, _ := syscall.GetStdHandle(syscall.STD_INPUT_HANDLE)
|
||||
stdout, _ := syscall.GetStdHandle(syscall.STD_OUTPUT_HANDLE)
|
||||
stderr, _ := syscall.GetStdHandle(syscall.STD_ERROR_HANDLE)
|
||||
|
||||
var invalid syscall.Handle
|
||||
con := invalid
|
||||
|
||||
if stdin == invalid || stdout == invalid || stderr == invalid {
|
||||
err := AttachConsole()
|
||||
if err != nil {
|
||||
return fmt.Errorf("attachconsole: %v", err)
|
||||
}
|
||||
|
||||
if stdin == invalid {
|
||||
stdin, _ = syscall.GetStdHandle(syscall.STD_INPUT_HANDLE)
|
||||
}
|
||||
if stdout == invalid {
|
||||
stdout, _ = syscall.GetStdHandle(syscall.STD_OUTPUT_HANDLE)
|
||||
con = stdout
|
||||
}
|
||||
if stderr == invalid {
|
||||
stderr, _ = syscall.GetStdHandle(syscall.STD_ERROR_HANDLE)
|
||||
con = stderr
|
||||
}
|
||||
}
|
||||
|
||||
if con != invalid {
|
||||
// Make sure the console is configured to convert
|
||||
// \n to \r\n, like Go programs expect.
|
||||
h := windows.Handle(con)
|
||||
var st uint32
|
||||
err := windows.GetConsoleMode(h, &st)
|
||||
if err != nil {
|
||||
return fmt.Errorf("GetConsoleMode: %v", err)
|
||||
}
|
||||
err = windows.SetConsoleMode(h, st&^windows.DISABLE_NEWLINE_AUTO_RETURN)
|
||||
if err != nil {
|
||||
return fmt.Errorf("SetConsoleMode: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
if stdin != invalid {
|
||||
os.Stdin = os.NewFile(uintptr(stdin), "stdin")
|
||||
}
|
||||
if stdout != invalid {
|
||||
os.Stdout = os.NewFile(uintptr(stdout), "stdout")
|
||||
}
|
||||
if stderr != invalid {
|
||||
os.Stderr = os.NewFile(uintptr(stderr), "stderr")
|
||||
}
|
||||
return nil
|
||||
}
|
@@ -1,3 +1,4 @@
//go:build macos || windows
// +build macos windows

package platform

@@ -1,3 +1,4 @@
//go:build !windows && !macos
// +build !windows,!macos

package platform

@@ -1,3 +1,4 @@
//go:build macos
// +build macos

package platform

@@ -1,3 +1,4 @@
//go:build windows
// +build windows

package platform

@@ -1,3 +1,4 @@
//go:build !windows && !darwin
// +build !windows,!darwin

package platform

@@ -1,3 +1,4 @@
//go:build darwin
// +build darwin

package platform

@@ -1,3 +1,4 @@
//go:build windows
// +build windows

package platform
@@ -12,7 +12,7 @@ type Middleware struct {
|
||||
Username string
|
||||
Password string
|
||||
BasePath string
|
||||
Public string
|
||||
Public []string
|
||||
}
|
||||
|
||||
func unsafeMethod(method string) bool {
|
||||
@@ -20,9 +20,11 @@ func unsafeMethod(method string) bool {
|
||||
}
|
||||
|
||||
func (m *Middleware) Handler(c *router.Context) {
|
||||
if strings.HasPrefix(c.Req.URL.Path, m.BasePath+m.Public) {
|
||||
c.Next()
|
||||
return
|
||||
for _, path := range m.Public {
|
||||
if strings.HasPrefix(c.Req.URL.Path, m.BasePath+path) {
|
||||
c.Next()
|
||||
return
|
||||
}
|
||||
}
|
||||
if IsAuthenticated(c.Req, m.Username, m.Password) {
|
||||
c.Next()
|
||||
|
393
src/server/fever.go
Normal file
@@ -0,0 +1,393 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/nkanaev/yarr/src/server/auth"
|
||||
"github.com/nkanaev/yarr/src/server/router"
|
||||
"github.com/nkanaev/yarr/src/storage"
|
||||
)
|
||||
|
||||
type FeverGroup struct {
|
||||
ID int64 `json:"id"`
|
||||
Title string `json:"title"`
|
||||
}
|
||||
|
||||
type FeverFeedsGroup struct {
|
||||
GroupID int64 `json:"group_id"`
|
||||
FeedIDs string `json:"feed_ids"`
|
||||
}
|
||||
|
||||
type FeverFeed struct {
|
||||
ID int64 `json:"id"`
|
||||
FaviconID int64 `json:"favicon_id"`
|
||||
Title string `json:"title"`
|
||||
Url string `json:"url"`
|
||||
SiteUrl string `json:"site_url"`
|
||||
IsSpark int `json:"is_spark"`
|
||||
LastUpdated int64 `json:"last_updated_on_time"`
|
||||
}
|
||||
|
||||
type FeverItem struct {
|
||||
ID int64 `json:"id"`
|
||||
FeedID int64 `json:"feed_id"`
|
||||
Title string `json:"title"`
|
||||
Author string `json:"author"`
|
||||
HTML string `json:"html"`
|
||||
Url string `json:"url"`
|
||||
IsSaved int `json:"is_saved"`
|
||||
IsRead int `json:"is_read"`
|
||||
CreatedAt int64 `json:"created_on_time"`
|
||||
}
|
||||
|
||||
type FeverFavicon struct {
|
||||
ID int64 `json:"id"`
|
||||
Data string `json:"data"`
|
||||
}
|
||||
|
||||
func writeFeverJSON(c *router.Context, data map[string]interface{}, lastRefreshed int64) {
|
||||
data["api_version"] = 3
|
||||
data["auth"] = 1
|
||||
data["last_refreshed_on_time"] = lastRefreshed
|
||||
c.JSON(http.StatusOK, data)
|
||||
}
|
||||
|
||||
func getLastRefreshedOnTime(httpStates map[int64]storage.HTTPState) int64 {
|
||||
if len(httpStates) == 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
var lastRefreshed int64
|
||||
for _, state := range httpStates {
|
||||
if state.LastRefreshed.Unix() > lastRefreshed {
|
||||
lastRefreshed = state.LastRefreshed.Unix()
|
||||
}
|
||||
}
|
||||
return lastRefreshed
|
||||
}
|
||||
|
||||
func (s *Server) feverAuth(c *router.Context) bool {
|
||||
if s.Username != "" && s.Password != "" {
|
||||
apiKey := c.Req.FormValue("api_key")
|
||||
apiKey = strings.ToLower(apiKey)
|
||||
md5HashValue := md5.Sum([]byte(fmt.Sprintf("%s:%s", s.Username, s.Password)))
|
||||
hexMD5HashValue := fmt.Sprintf("%x", md5HashValue[:])
|
||||
if !auth.StringsEqual(apiKey, hexMD5HashValue) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
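feverAuth above accepts a request when the supplied api_key equals the lowercase hex MD5 of "username:password". A minimal sketch of how a client would derive that key (the credentials below are placeholders):

```go
package main

import (
    "crypto/md5"
    "fmt"
)

// feverAPIKey mirrors the check in feverAuth: the Fever api_key is the
// hex-encoded MD5 digest of "username:password".
func feverAPIKey(username, password string) string {
    sum := md5.Sum([]byte(fmt.Sprintf("%s:%s", username, password)))
    return fmt.Sprintf("%x", sum[:])
}

func main() {
    // Placeholder credentials for illustration only.
    fmt.Println(feverAPIKey("admin", "secret"))
}
```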
|
||||
func formHasValue(values url.Values, value string) bool {
|
||||
if _, ok := values[value]; ok {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (s *Server) handleFever(c *router.Context) {
|
||||
c.Req.ParseForm()
|
||||
if !s.feverAuth(c) {
|
||||
c.JSON(http.StatusOK, map[string]interface{}{
|
||||
"api_version": 3,
|
||||
"auth": 0,
|
||||
"last_refreshed_on_time": 0,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
switch {
|
||||
case formHasValue(c.Req.Form, "groups"):
|
||||
s.feverGroupsHandler(c)
|
||||
case formHasValue(c.Req.Form, "feeds"):
|
||||
s.feverFeedsHandler(c)
|
||||
case formHasValue(c.Req.Form, "unread_item_ids"):
|
||||
s.feverUnreadItemIDsHandler(c)
|
||||
case formHasValue(c.Req.Form, "saved_item_ids"):
|
||||
s.feverSavedItemIDsHandler(c)
|
||||
case formHasValue(c.Req.Form, "favicons"):
|
||||
s.feverFaviconsHandler(c)
|
||||
case formHasValue(c.Req.Form, "items"):
|
||||
s.feverItemsHandler(c)
|
||||
case formHasValue(c.Req.Form, "links"):
|
||||
s.feverLinksHandler(c)
|
||||
case formHasValue(c.Req.Form, "mark"):
|
||||
s.feverMarkHandler(c)
|
||||
default:
|
||||
c.JSON(http.StatusOK, map[string]interface{}{
|
||||
"api_version": 3,
|
||||
"auth": 1,
|
||||
"last_refreshed_on_time": getLastRefreshedOnTime(s.db.ListHTTPStates()),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func joinInts(values []int64) string {
|
||||
var result strings.Builder
|
||||
for i, val := range values {
|
||||
fmt.Fprintf(&result, "%d", val)
|
||||
if i != len(values)-1 {
|
||||
result.WriteString(",")
|
||||
}
|
||||
}
|
||||
return result.String()
|
||||
}
|
||||
|
||||
func feedGroups(db *storage.Storage) []*FeverFeedsGroup {
|
||||
feeds := db.ListFeeds()
|
||||
|
||||
groupFeeds := make(map[int64][]int64)
|
||||
for _, feed := range feeds {
|
||||
if feed.FolderId == nil {
|
||||
continue
|
||||
}
|
||||
groupFeeds[*feed.FolderId] = append(groupFeeds[*feed.FolderId], feed.Id)
|
||||
}
|
||||
result := make([]*FeverFeedsGroup, 0)
|
||||
for groupId, feedIds := range groupFeeds {
|
||||
result = append(result, &FeverFeedsGroup{
|
||||
GroupID: groupId,
|
||||
FeedIDs: joinInts(feedIds),
|
||||
})
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func (s *Server) feverGroupsHandler(c *router.Context) {
|
||||
folders := s.db.ListFolders()
|
||||
groups := make([]*FeverGroup, len(folders))
|
||||
for i, folder := range folders {
|
||||
groups[i] = &FeverGroup{ID: folder.Id, Title: folder.Title}
|
||||
}
|
||||
writeFeverJSON(c, map[string]interface{}{
|
||||
"groups": groups,
|
||||
"feeds_groups": feedGroups(s.db),
|
||||
}, getLastRefreshedOnTime(s.db.ListHTTPStates()))
|
||||
}
|
||||
|
||||
func (s *Server) feverFeedsHandler(c *router.Context) {
|
||||
feeds := s.db.ListFeeds()
|
||||
httpStates := s.db.ListHTTPStates()
|
||||
|
||||
feverFeeds := make([]*FeverFeed, len(feeds))
|
||||
for i, feed := range feeds {
|
||||
var lastUpdated int64
|
||||
if state, ok := httpStates[feed.Id]; ok {
|
||||
lastUpdated = state.LastRefreshed.Unix()
|
||||
}
|
||||
feverFeeds[i] = &FeverFeed{
|
||||
ID: feed.Id,
|
||||
FaviconID: feed.Id,
|
||||
Title: feed.Title,
|
||||
Url: feed.FeedLink,
|
||||
            SiteUrl:     feed.Link,
            IsSpark:     0,
            LastUpdated: lastUpdated,
        }
    }
    writeFeverJSON(c, map[string]interface{}{
        "feeds":        feverFeeds,
        "feeds_groups": feedGroups(s.db),
    }, getLastRefreshedOnTime(httpStates))
}

func (s *Server) feverFaviconsHandler(c *router.Context) {
    feeds := s.db.ListFeeds()
    favicons := make([]*FeverFavicon, len(feeds))
    for i, feed := range feeds {
        data := "data:image/gif;base64,R0lGODlhAQABAAAAACw="
        if feed.HasIcon {
            icon := s.db.GetFeed(feed.Id).Icon
            data = fmt.Sprintf(
                "data:%s;base64,%s",
                http.DetectContentType(*icon),
                base64.StdEncoding.EncodeToString(*icon),
            )
        }
        favicons[i] = &FeverFavicon{ID: feed.Id, Data: data}
    }

    writeFeverJSON(c, map[string]interface{}{
        "favicons": favicons,
    }, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}

// for memory pressure reasons, we only return a limited number of items
// documented at https://github.com/DigitalDJ/tinytinyrss-fever-plugin/blob/master/fever-api.md#items
const listLimit = 50

func (s *Server) feverItemsHandler(c *router.Context) {
    filter := storage.ItemFilter{}
    query := c.Req.URL.Query()

    switch {
    case query.Get("with_ids") != "":
        ids := make([]int64, 0)
        for _, idstr := range strings.Split(query.Get("with_ids"), ",") {
            if idnum, err := strconv.ParseInt(idstr, 10, 64); err == nil {
                ids = append(ids, idnum)
            }
        }
        filter.IDs = &ids
    case query.Get("since_id") != "":
        idstr := query.Get("since_id")
        if idnum, err := strconv.ParseInt(idstr, 10, 64); err == nil {
            filter.SinceID = &idnum
        }
    case query.Get("max_id") != "":
        idstr := query.Get("max_id")
        if idnum, err := strconv.ParseInt(idstr, 10, 64); err == nil {
            filter.MaxID = &idnum
        }
    }

    items := s.db.ListItems(filter, listLimit, true, true)

    feverItems := make([]FeverItem, len(items))
    for i, item := range items {
        date := item.Date
        time := date.Unix()

        isSaved := 0
        if item.Status == storage.STARRED {
            isSaved = 1
        }
        isRead := 0
        if item.Status == storage.READ {
            isRead = 1
        }
        feverItems[i] = FeverItem{
            ID:        item.Id,
            FeedID:    item.FeedId,
            Title:     item.Title,
            Author:    "",
            HTML:      item.Content,
            Url:       item.Link,
            IsSaved:   isSaved,
            IsRead:    isRead,
            CreatedAt: time,
        }
    }

    totalItems := s.db.CountItems(storage.ItemFilter{})

    writeFeverJSON(c, map[string]interface{}{
        "items":       feverItems,
        "total_items": totalItems,
    }, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}

func (s *Server) feverLinksHandler(c *router.Context) {
    writeFeverJSON(c, map[string]interface{}{
        "links": make([]interface{}, 0),
    }, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}

func (s *Server) feverUnreadItemIDsHandler(c *router.Context) {
    status := storage.UNREAD
    itemIds := make([]int64, 0)

    itemFilter := storage.ItemFilter{
        Status: &status,
    }
    for {
        items := s.db.ListItems(itemFilter, listLimit, true, false)
        if len(items) == 0 {
            break
        }
        for _, item := range items {
            itemIds = append(itemIds, item.Id)
        }
        itemFilter.After = &items[len(items)-1].Id
    }
    writeFeverJSON(c, map[string]interface{}{
        "unread_item_ids": joinInts(itemIds),
    }, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}

func (s *Server) feverSavedItemIDsHandler(c *router.Context) {
    status := storage.STARRED
    itemIds := make([]int64, 0)

    itemFilter := storage.ItemFilter{
        Status: &status,
    }
    for {
        items := s.db.ListItems(itemFilter, listLimit, true, false)
        if len(items) == 0 {
            break
        }
        for _, item := range items {
            itemIds = append(itemIds, item.Id)
        }
        itemFilter.After = &items[len(items)-1].Id
    }
    writeFeverJSON(c, map[string]interface{}{
        "saved_item_ids": joinInts(itemIds),
    }, getLastRefreshedOnTime(s.db.ListHTTPStates()))
}

func (s *Server) feverMarkHandler(c *router.Context) {
    id, err := strconv.ParseInt(c.Req.Form.Get("id"), 10, 64)
    if err != nil {
        log.Print("invalid id:", err)
        return
    }

    switch c.Req.Form.Get("mark") {
    case "item":
        var status storage.ItemStatus
        switch c.Req.Form.Get("as") {
        case "read":
            status = storage.READ
        case "unread":
            status = storage.UNREAD
        case "saved":
            status = storage.STARRED
        case "unsaved":
            status = storage.READ
        default:
            c.Out.WriteHeader(http.StatusBadRequest)
            return
        }
        s.db.UpdateItemStatus(id, status)
    case "feed":
        if c.Req.Form.Get("as") != "read" {
            c.Out.WriteHeader(http.StatusBadRequest)
        }
        markFilter := storage.MarkFilter{FeedID: &id}
        x, _ := strconv.ParseInt(c.Req.Form.Get("before"), 10, 64)
        if x > 0 {
            before := time.Unix(x, 0)
            markFilter.Before = &before
        }
        s.db.MarkItemsRead(markFilter)
    case "group":
        if c.Req.Form.Get("as") != "read" {
            c.Out.WriteHeader(http.StatusBadRequest)
        }
        markFilter := storage.MarkFilter{FolderID: &id}
        x, _ := strconv.ParseInt(c.Req.Form.Get("before"), 10, 64)
        if x > 0 {
            before := time.Unix(x, 0)
            markFilter.Before = &before
        }
        s.db.MarkItemsRead(markFilter)
    default:
        c.Out.WriteHeader(http.StatusBadRequest)
        return
    }
    c.JSON(http.StatusOK, map[string]interface{}{
        "api_version": 3,
        "auth":        1,
    })
}
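The since_id branch above is what lets clients sync incrementally without ever pulling more than listLimit items per call. A rough sketch of the paging loop a Fever client might run against this handler follows; the address, the exact query keys and the JSON field names are assumptions taken from the Fever API documentation linked above, and authentication (the api_key form value real clients POST) is omitted.

package main

import (
    "encoding/json"
    "fmt"
    "net/http"
)

func main() {
    base := "http://127.0.0.1:7070/fever/" // assumed address of a local yarr instance
    sinceID := int64(0)
    for {
        // ask only for items with an id greater than the last one we have seen
        url := fmt.Sprintf("%s?api&items&since_id=%d", base, sinceID)
        resp, err := http.Post(url, "application/x-www-form-urlencoded", nil)
        if err != nil {
            panic(err)
        }
        var payload struct {
            Items []struct {
                ID int64 `json:"id"`
            } `json:"items"`
        }
        if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
            panic(err)
        }
        resp.Body.Close()
        if len(payload.Items) == 0 {
            break // caught up: the handler returned an empty page
        }
        // with since_id the storage layer orders by id ascending,
        // so the last item of the page is the next cursor
        sinceID = payload.Items[len(payload.Items)-1].ID
        fmt.Printf("got %d items, next since_id=%d\n", len(payload.Items), sinceID)
    }
}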
@@ -28,15 +28,16 @@ func (rw *gzipResponseWriter) WriteHeader(statusCode int) {
}

func Middleware(c *router.Context) {
    if strings.Contains(c.Req.Header.Get("Accept-Encoding"), "gzip") {
        gz := &gzipResponseWriter{out: gzip.NewWriter(c.Out), src: c.Out}
        defer gz.out.Close()

        c.Out.Header().Set("Content-Encoding", "gzip")
        c.Out = gz
    if !strings.Contains(c.Req.Header.Get("Accept-Encoding"), "gzip") {
        c.Next()

        return
    }

    gz := &gzipResponseWriter{out: gzip.NewWriter(c.Out), src: c.Out}
    defer gz.out.Close()

    c.Out.Header().Set("Content-Encoding", "gzip")
    c.Out = gz

    c.Next()
}
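A quick way to check the rewritten middleware from the client side is to issue the same request with and without Accept-Encoding and decode the gzipped variant manually. This is only a sketch; the address is a placeholder, and DisableCompression is used so the Go transport does not hide the negotiated Content-Encoding.

package main

import (
    "compress/gzip"
    "fmt"
    "io"
    "net/http"
)

func fetch(url string, askGzip bool) (int, error) {
    req, err := http.NewRequest("GET", url, nil)
    if err != nil {
        return 0, err
    }
    if askGzip {
        req.Header.Set("Accept-Encoding", "gzip")
    }
    // keep the raw body so we can see whether the middleware gzipped it
    client := &http.Client{Transport: &http.Transport{DisableCompression: true}}
    resp, err := client.Do(req)
    if err != nil {
        return 0, err
    }
    defer resp.Body.Close()

    var body io.Reader = resp.Body
    if resp.Header.Get("Content-Encoding") == "gzip" {
        if body, err = gzip.NewReader(resp.Body); err != nil {
            return 0, err
        }
    }
    data, err := io.ReadAll(body)
    return len(data), err
}

func main() {
    for _, gz := range []bool{false, true} {
        n, err := fetch("http://127.0.0.1:7070/", gz) // placeholder address
        fmt.Println("gzip requested:", gz, "decoded bytes:", n, "err:", err)
    }
}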
@@ -3,6 +3,8 @@ package opml
import (
    "encoding/xml"
    "io"

    "golang.org/x/net/html/charset"
)

type opml struct {
@@ -13,6 +15,7 @@ type opml struct {
type outline struct {
    Type     string    `xml:"type,attr,omitempty"`
    Title    string    `xml:"text,attr"`
    Title2   string    `xml:"title,attr,omitempty"`
    FeedUrl  string    `xml:"xmlUrl,attr,omitempty"`
    SiteUrl  string    `xml:"htmlUrl,attr,omitempty"`
    Outlines []outline `xml:"outline,omitempty"`
@@ -21,14 +24,18 @@ type outline struct {
func buildFolder(title string, outlines []outline) Folder {
    folder := Folder{Title: title}
    for _, outline := range outlines {
        if outline.Type == "rss" {
        if outline.Type == "rss" || outline.FeedUrl != "" {
            folder.Feeds = append(folder.Feeds, Feed{
                Title:   outline.Title,
                FeedUrl: outline.FeedUrl,
                SiteUrl: outline.SiteUrl,
            })
        } else {
            subfolder := buildFolder(outline.Title, outline.Outlines)
            title := outline.Title
            if title == "" {
                title = outline.Title2
            }
            subfolder := buildFolder(title, outline.Outlines)
            folder.Folders = append(folder.Folders, subfolder)
        }
    }
@@ -40,6 +47,7 @@ func Parse(r io.Reader) (Folder, error) {
    decoder := xml.NewDecoder(r)
    decoder.Entity = xml.HTMLEntity
    decoder.Strict = false
    decoder.CharsetReader = charset.NewReaderLabel

    err := decoder.Decode(&val)
    if err != nil {
@@ -1,6 +1,7 @@
package opml

import (
    "os"
    "reflect"
    "strings"
    "testing"
@@ -56,3 +57,72 @@ func TestParse(t *testing.T) {
        t.Fatal("invalid opml")
    }
}

func TestParseFallback(t *testing.T) {
    // as reported in https://github.com/nkanaev/yarr/pull/56
    // the feed below comes without `outline[text]` & `outline[type=rss]` attributes
    have, _ := Parse(strings.NewReader(`
        <?xml version="1.0" encoding="utf-8"?>
        <opml xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" version="1.0">
          <head>
            <title>Newsflow</title>
          </head>
          <body>
            <outline title="foldertitle">
              <outline htmlUrl="https://example.com" text="feedtext" title="feedtitle" xmlUrl="https://example.com/feed.xml" />
            </outline>
          </body>
        </opml>
    `))
    want := Folder{
        Folders: []Folder{{
            Title: "foldertitle",
            Feeds: []Feed{
                {Title: "feedtext", FeedUrl: "https://example.com/feed.xml", SiteUrl: "https://example.com"},
            },
        }},
    }
    if !reflect.DeepEqual(want, have) {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fatal("invalid opml")
    }
}

func TestParseWithEncoding(t *testing.T) {
    file, err := os.Open("sample_win1251.xml")
    if err != nil {
        t.Fatal(err)
    }
    have, err := Parse(file)
    if err != nil {
        t.Fatal(err)
    }
    want := Folder{
        Title: "",
        Feeds: []Feed{
            {
                Title:   "пример1",
                FeedUrl: "https://baz.com/feed.xml",
                SiteUrl: "https://baz.com/",
            },
        },
        Folders: []Folder{
            {
                Title: "папка",
                Feeds: []Feed{
                    {
                        Title:   "пример2",
                        FeedUrl: "https://foo.com/feed.xml",
                        SiteUrl: "https://foo.com/",
                    },
                },
            },
        },
    }
    if !reflect.DeepEqual(want, have) {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fatal("invalid opml")
    }
}
10
src/server/opml/sample_win1251.xml
Normal file
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="windows-1251"?>
<opml version="1.1">
  <head><title>���������</title></head>
  <body>
    <outline text="папка">
      <outline type="rss" text="пример2" description="пример2" xmlUrl="https://foo.com/feed.xml" htmlUrl="https://foo.com/"/>
    </outline>
    <outline type="rss" text="пример1" description="пример1" xmlUrl="https://baz.com/feed.xml" htmlUrl="https://baz.com/"/>
  </body>
</opml>
@@ -29,15 +29,15 @@ func (c *Context) JSON(status int, data interface{}) {
    if err != nil {
        log.Fatal(err)
    }
    c.Out.WriteHeader(status)
    c.Out.Header().Set("Content-Type", "application/json; charset=utf-8")
    c.Out.WriteHeader(status)
    c.Out.Write(body)
    c.Out.Write([]byte("\n"))
}

func (c *Context) HTML(status int, tmpl *template.Template, data interface{}) {
    c.Out.WriteHeader(status)
    c.Out.Header().Set("Content-Type", "text/html")
    c.Out.WriteHeader(status)
    tmpl.Execute(c.Out, data)
}
@@ -32,10 +32,13 @@ func (r *Router) Use(h Handler) {
}

func (r *Router) For(path string, handler Handler) {
    chain := make([]Handler, 0)
    chain = append(chain, r.middle...)
    chain = append(chain, handler)

    x := Route{}
    x.regex = routeRegexp(path)
    x.chain = append(r.middle, handler)

    x.chain = chain
    r.routes = append(r.routes, x)
}
@@ -1,15 +1,18 @@
package server

import (
    "crypto/md5"
    "encoding/json"
    "fmt"
    "log"
    "math"
    "net/http"
    "path/filepath"
    "reflect"
    "strconv"
    "strings"

    "github.com/nkanaev/yarr/src/assets"
    "github.com/nkanaev/yarr/src/content/htmlutil"
    "github.com/nkanaev/yarr/src/content/readability"
    "github.com/nkanaev/yarr/src/content/sanitizer"
    "github.com/nkanaev/yarr/src/content/silo"
@@ -31,12 +34,13 @@ func (s *Server) handler() http.Handler {
            BasePath: s.BasePath,
            Username: s.Username,
            Password: s.Password,
            Public:   "/static",
            Public:   []string{"/static", "/fever"},
        }
        r.Use(a.Handler)
    }

    r.For("/", s.handleIndex)
    r.For("/manifest.json", s.handleManifest)
    r.For("/static/*path", s.handleStatic)
    r.For("/api/status", s.handleStatus)
    r.For("/api/folders", s.handleFolderList)
@@ -53,6 +57,7 @@ func (s *Server) handler() http.Handler {
    r.For("/opml/export", s.handleOPMLExport)
    r.For("/page", s.handlePageCrawl)
    r.For("/logout", s.handleLogout)
    r.For("/fever/", s.handleFever)

    return r
}
@@ -74,6 +79,24 @@ func (s *Server) handleStatic(c *router.Context) {
    http.StripPrefix(s.BasePath+"/static/", http.FileServer(http.FS(assets.FS))).ServeHTTP(c.Out, c.Req)
}

func (s *Server) handleManifest(c *router.Context) {
    c.JSON(http.StatusOK, map[string]interface{}{
        "$schema":     "https://json.schemastore.org/web-manifest-combined.json",
        "name":        "yarr!",
        "short_name":  "yarr",
        "description": "yet another rss reader",
        "display":     "standalone",
        "start_url":   s.BasePath,
        "icons": []map[string]interface{}{
            {
                "src":   s.BasePath + "/static/graphicarts/favicon.png",
                "sizes": "64x64",
                "type":  "image/png",
            },
        },
    })
}

func (s *Server) handleStatus(c *router.Context) {
    c.JSON(http.StatusOK, map[string]interface{}{
        "running": s.worker.FeedsPending(),
@@ -143,19 +166,55 @@ func (s *Server) handleFeedErrors(c *router.Context) {
    c.JSON(http.StatusOK, errors)
}

type feedicon struct {
    ctype string
    bytes []byte
    etag  string
}

func (s *Server) handleFeedIcon(c *router.Context) {
    id, err := c.VarInt64("id")
    if err != nil {
        c.Out.WriteHeader(http.StatusBadRequest)
        return
    }
    feed := s.db.GetFeed(id)
    if feed != nil && feed.Icon != nil {
        c.Out.Header().Set("Content-Type", http.DetectContentType(*feed.Icon))
        c.Out.Write(*feed.Icon)
    } else {
        c.Out.WriteHeader(http.StatusNotFound)

    cachekey := "icon:" + strconv.FormatInt(id, 10)
    s.cache_mutex.Lock()
    cachedat := s.cache[cachekey]
    s.cache_mutex.Unlock()
    if cachedat == nil {
        feed := s.db.GetFeed(id)
        if feed == nil || feed.Icon == nil {
            c.Out.WriteHeader(http.StatusNotFound)
            return
        }

        hash := md5.New()
        hash.Write(*feed.Icon)

        etag := fmt.Sprintf("%x", hash.Sum(nil))[:16]

        cachedat = feedicon{
            ctype: http.DetectContentType(*feed.Icon),
            bytes: *(*feed).Icon,
            etag:  etag,
        }
        s.cache_mutex.Lock()
        s.cache[cachekey] = cachedat
        s.cache_mutex.Unlock()
    }

    icon := cachedat.(feedicon)

    if c.Req.Header.Get("If-None-Match") == icon.etag {
        c.Out.WriteHeader(http.StatusNotModified)
        return
    }

    c.Out.Header().Set("Content-Type", icon.ctype)
    c.Out.Header().Set("Etag", icon.etag)
    c.Out.Write(icon.bytes)
}
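A client-side view of the new ETag handling above, as a sketch: the first request pays for hashing and transferring the icon, the second one only round-trips a 304. The address and feed id are placeholders.

package main

import (
    "fmt"
    "net/http"
)

func main() {
    url := "http://127.0.0.1:7070/api/feeds/1/icon" // placeholder address and feed id

    // first request: the server computes the icon's ETag and caches it in memory
    res1, err := http.Get(url)
    if err != nil {
        panic(err)
    }
    res1.Body.Close()
    etag := res1.Header.Get("Etag")
    fmt.Println("first fetch:", res1.StatusCode, "etag:", etag)

    // second request: If-None-Match lets the handler answer 304
    // without re-sending the icon bytes
    req, _ := http.NewRequest("GET", url, nil)
    req.Header.Set("If-None-Match", etag)
    res2, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    res2.Body.Close()
    fmt.Println("second fetch:", res2.StatusCode) // expected: 304 Not Modified
}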
func (s *Server) handleFeedList(c *router.Context) {
@@ -185,12 +244,17 @@ func (s *Server) handleFeedList(c *router.Context) {
            result.FeedLink,
            form.FolderID,
        )
        s.db.CreateItems(worker.ConvertItems(result.Feed.Items, *feed))
        items := worker.ConvertItems(result.Feed.Items, *feed)
        if len(items) > 0 {
            s.db.CreateItems(items)
            s.db.SetFeedSize(feed.Id, len(items))
            s.db.SyncSearch()
        }
        s.worker.FindFeedFavicon(*feed)

        c.JSON(http.StatusOK, map[string]interface{}{
            "status": "success",
            "feed":   feed,
            "feed": feed,
        })
    default:
        c.JSON(http.StatusOK, map[string]string{"status": "notfound"})
@@ -250,6 +314,14 @@ func (s *Server) handleItem(c *router.Context) {
        c.Out.WriteHeader(http.StatusBadRequest)
        return
    }

    // runtime fix for relative links
    if !htmlutil.IsAPossibleLink(item.Link) {
        if feed := s.db.GetFeed(item.FeedId); feed != nil {
            item.Link = htmlutil.AbsoluteUrl(item.Link, feed.Link)
        }
    }

    item.Content = sanitizer.Sanitize(item.Link, item.Content)

    c.JSON(http.StatusOK, item)
@@ -272,11 +344,8 @@ func (s *Server) handleItem(c *router.Context) {
func (s *Server) handleItemList(c *router.Context) {
    if c.Req.Method == "GET" {
        perPage := 20
        curPage := 1
        query := c.Req.URL.Query()
        if page, err := c.QueryInt64("page"); err == nil {
            curPage = int(page)
        }

        filter := storage.ItemFilter{}
        if folderID, err := c.QueryInt64("folder_id"); err == nil {
            filter.FolderID = &folderID
@@ -284,6 +353,9 @@ func (s *Server) handleItemList(c *router.Context) {
        if feedID, err := c.QueryInt64("feed_id"); err == nil {
            filter.FeedID = &feedID
        }
        if after, err := c.QueryInt64("after"); err == nil {
            filter.After = &after
        }
        if status := query.Get("status"); len(status) != 0 {
            statusValue := storage.StatusValues[status]
            filter.Status = &statusValue
@@ -292,14 +364,16 @@ func (s *Server) handleItemList(c *router.Context) {
            filter.Search = &search
        }
        newestFirst := query.Get("oldest_first") != "true"
        items := s.db.ListItems(filter, (curPage-1)*perPage, perPage, newestFirst)
        count := s.db.CountItems(filter)

        items := s.db.ListItems(filter, perPage+1, newestFirst, false)
        hasMore := false
        if len(items) == perPage+1 {
            hasMore = true
            items = items[:perPage]
        }
        c.JSON(http.StatusOK, map[string]interface{}{
            "page": map[string]int{
                "cur": curPage,
                "num": int(math.Ceil(float64(count) / float64(perPage))),
            },
            "list": items,
            "list":     items,
            "has_more": hasMore,
        })
    } else if c.Req.Method == "PUT" {
        filter := storage.MarkFilter{}
@@ -414,24 +488,27 @@ func (s *Server) handleOPMLExport(c *router.Context) {
func (s *Server) handlePageCrawl(c *router.Context) {
    url := c.Req.URL.Query().Get("url")

    if newUrl := silo.RedirectURL(url); newUrl != "" {
        url = newUrl
    }
    if content := silo.VideoIFrame(url); content != "" {
        c.JSON(http.StatusOK, map[string]string{
            "content": content,
            "content": sanitizer.Sanitize(url, content),
        })
        return
    }

    res, err := http.Get(url)
    body, err := worker.GetBody(url)
    if err != nil {
        log.Print(err)
        c.Out.WriteHeader(http.StatusBadRequest)
        return
    }
    defer res.Body.Close()
    content, err := readability.ExtractContent(res.Body)
    content, err := readability.ExtractContent(strings.NewReader(body))
    if err != nil {
        log.Print(err)
        c.Out.WriteHeader(http.StatusNoContent)
        c.JSON(http.StatusOK, map[string]string{
            "content": "error: " + err.Error(),
        })
        return
    }
    content = sanitizer.Sanitize(url, content)
@@ -1,8 +1,16 @@
package server

import (
    "fmt"
    "io"
    "log"
    "net/http"
    "net/http/httptest"
    "os"
    "reflect"
    "testing"

    "github.com/nkanaev/yarr/src/storage"
)

func TestStatic(t *testing.T) {
@@ -43,3 +51,64 @@ func TestStaticBanTemplates(t *testing.T) {
        t.FailNow()
    }
}

func TestIndexGzipped(t *testing.T) {
    log.SetOutput(io.Discard)
    db, _ := storage.New(":memory:")
    log.SetOutput(os.Stderr)
    handler := NewServer(db, "127.0.0.1:8000").handler()
    url := "/"

    recorder := httptest.NewRecorder()
    request := httptest.NewRequest("GET", url, nil)
    request.Header.Set("accept-encoding", "gzip")
    handler.ServeHTTP(recorder, request)
    response := recorder.Result()
    if response.StatusCode != 200 {
        t.FailNow()
    }
    if response.Header.Get("content-encoding") != "gzip" {
        t.Errorf("invalid content-encoding header: %#v", response.Header.Get("content-encoding"))
    }
    if response.Header.Get("content-type") != "text/html" {
        t.Errorf("invalid content-type header: %#v", response.Header.Get("content-type"))
    }
}

func TestFeedIcons(t *testing.T) {
    log.SetOutput(io.Discard)
    db, _ := storage.New(":memory:")
    icon := []byte("test")
    feed := db.CreateFeed("", "", "", "", nil)
    db.UpdateFeedIcon(feed.Id, &icon)
    log.SetOutput(os.Stderr)

    recorder := httptest.NewRecorder()
    url := fmt.Sprintf("/api/feeds/%d/icon", feed.Id)
    request := httptest.NewRequest("GET", url, nil)

    handler := NewServer(db, "127.0.0.1:8000").handler()
    handler.ServeHTTP(recorder, request)
    response := recorder.Result()

    if response.StatusCode != http.StatusOK {
        t.Fatal()
    }
    body, _ := io.ReadAll(response.Body)
    if !reflect.DeepEqual(body, icon) {
        t.Fatal()
    }
    if response.Header.Get("Etag") == "" {
        t.Fatal()
    }

    recorder2 := httptest.NewRecorder()
    request2 := httptest.NewRequest("GET", url, nil)
    request2.Header.Set("If-None-Match", response.Header.Get("Etag"))
    handler.ServeHTTP(recorder2, request2)
    response2 := recorder2.Result()

    if response2.StatusCode != http.StatusNotModified {
        t.Fatal("got", response2.StatusCode)
    }
}
@@ -3,15 +3,18 @@ package server
import (
    "log"
    "net/http"
    "sync"

    "github.com/nkanaev/yarr/src/storage"
    "github.com/nkanaev/yarr/src/worker"
)

type Server struct {
    Addr   string
    db     *storage.Storage
    worker *worker.Worker
    Addr        string
    db          *storage.Storage
    worker      *worker.Worker
    cache       map[string]interface{}
    cache_mutex *sync.Mutex

    BasePath string

@@ -25,9 +28,11 @@ type Server struct {

func NewServer(db *storage.Storage, addr string) *Server {
    return &Server{
        db:     db,
        Addr:   addr,
        worker: worker.NewWorker(db),
        db:          db,
        Addr:        addr,
        worker:      worker.NewWorker(db),
        cache:       make(map[string]interface{}),
        cache_mutex: &sync.Mutex{},
    }
}
@@ -20,18 +20,19 @@ func (s *Storage) CreateFeed(title, description, link, feedLink string, folderId
    if title == "" {
        title = feedLink
    }
    result, err := s.db.Exec(`
    row := s.db.QueryRow(`
        insert into feeds (title, description, link, feed_link, folder_id)
        values (?, ?, ?, ?, ?)
        on conflict (feed_link) do update set folder_id=?`,
        on conflict (feed_link) do update set folder_id = ?
        returning id`,
        title, description, link, feedLink, folderId,
        folderId,
    )

    var id int64
    err := row.Scan(&id)
    if err != nil {
        return nil
    }
    id, idErr := result.LastInsertId()
    if idErr != nil {
        log.Print(err)
        return nil
    }
    return &Feed{
@@ -76,10 +77,10 @@ func (s *Storage) UpdateFeedIcon(feedId int64, icon *[]byte) bool {
}

func (s *Storage) ListFeeds() []Feed {
    result := make([]Feed, 0, 0)
    result := make([]Feed, 0)
    rows, err := s.db.Query(`
        select id, folder_id, title, description, link, feed_link,
            ifnull(icon, '') != '' as has_icon
            ifnull(length(icon), 0) > 0 as has_icon
        from feeds
        order by title collate nocase
    `)
@@ -107,6 +108,36 @@ func (s *Storage) ListFeeds() []Feed {
    return result
}

func (s *Storage) ListFeedsMissingIcons() []Feed {
    result := make([]Feed, 0)
    rows, err := s.db.Query(`
        select id, folder_id, title, description, link, feed_link
        from feeds
        where icon is null
    `)
    if err != nil {
        log.Print(err)
        return result
    }
    for rows.Next() {
        var f Feed
        err = rows.Scan(
            &f.Id,
            &f.FolderId,
            &f.Title,
            &f.Description,
            &f.Link,
            &f.FeedLink,
        )
        if err != nil {
            log.Print(err)
            return result
        }
        result = append(result, f)
    }
    return result
}

func (s *Storage) GetFeed(id int64) *Feed {
    var f Feed
    err := s.db.QueryRow(`
@@ -164,3 +195,15 @@ func (s *Storage) GetFeedErrors() map[int64]string {
    }
    return errors
}

func (s *Storage) SetFeedSize(feedId int64, size int) {
    _, err := s.db.Exec(`
        insert into feed_sizes (feed_id, size)
        values (?, ?)
        on conflict (feed_id) do update set size = excluded.size`,
        feedId, size,
    )
    if err != nil {
        log.Print(err)
    }
}
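The CreateFeed change above (and the CreateFolder change further down) swaps Exec plus LastInsertId for an upsert with a RETURNING clause, so the row id comes back in a single statement even when the insert hits a conflict. A standalone sketch of the pattern follows; it assumes the bundled mattn/go-sqlite3 build ships SQLite 3.35 or newer (where RETURNING was introduced), and the table and column names are illustrative only.

package main

import (
    "database/sql"
    "fmt"

    _ "github.com/mattn/go-sqlite3"
)

func upsert(db *sql.DB, link string) (int64, error) {
    var id int64
    // a bare "do nothing" would return no row on conflict,
    // hence the dummy update that keeps the value unchanged
    err := db.QueryRow(`
        insert into feeds (feed_link) values (?)
        on conflict (feed_link) do update set feed_link = excluded.feed_link
        returning id`, link).Scan(&id)
    return id, err
}

func main() {
    db, err := sql.Open("sqlite3", ":memory:")
    if err != nil {
        panic(err)
    }
    db.Exec(`create table feeds (id integer primary key, feed_link text unique)`)

    a, _ := upsert(db, "https://example.com/feed.xml")
    b, _ := upsert(db, "https://example.com/feed.xml") // conflict: the same id comes back
    fmt.Println(a == b)                                // true
}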
@@ -17,6 +17,23 @@ func TestCreateFeed(t *testing.T) {
    }
}

func TestCreateFeedSameLink(t *testing.T) {
    db := testDB()
    feed1 := db.CreateFeed("title", "", "", "http://example1.com/feed.xml", nil)
    if feed1 == nil || feed1.Id == 0 {
        t.Fatal("expected feed")
    }

    for i := 0; i < 10; i++ {
        db.CreateFeed("title", "", "", "http://example2.com/feed.xml", nil)
    }

    feed2 := db.CreateFeed("title", "", "http://example.com", "http://example1.com/feed.xml", nil)
    if feed1.Id != feed2.Id {
        t.Fatalf("expected the same feed.\nwant: %#v\nhave: %#v", feed1, feed2)
    }
}

func TestReadFeed(t *testing.T) {
    db := testDB()
    if db.GetFeed(100500) != nil {
@@ -1,7 +1,6 @@
package storage

import (
    "fmt"
    "log"
)

@@ -13,35 +12,21 @@ type Folder struct {

func (s *Storage) CreateFolder(title string) *Folder {
    expanded := true
    result, err := s.db.Exec(`
    row := s.db.QueryRow(`
        insert into folders (title, is_expanded) values (?, ?)
        on conflict (title) do nothing`,
        on conflict (title) do update set title = ?
        returning id`,
        title, expanded,
        // provide title again so that we can extract row id
        title,
    )
    if err != nil {
        fmt.Println(err)
        return nil
    }

    var id int64
    numrows, err := result.RowsAffected()
    err := row.Scan(&id)

    if err != nil {
        log.Print(err)
        return nil
    }
    if numrows == 1 {
        id, err = result.LastInsertId()
        if err != nil {
            log.Print(err)
            return nil
        }
    } else {
        err = s.db.QueryRow(`select id, is_expanded from folders where title=?`, title).Scan(&id, &expanded)
        if err != nil {
            log.Print(err)
            return nil
        }
    }
    return &Folder{Id: id, Title: title, IsExpanded: expanded}
}
@@ -4,6 +4,7 @@ import (
    "encoding/json"
    "fmt"
    "log"
    "sort"
    "strings"
    "time"

@@ -61,13 +62,39 @@ type ItemFilter struct {
    FeedID  *int64
    Status  *ItemStatus
    Search  *string
    After   *int64
    IDs     *[]int64
    SinceID *int64
    MaxID   *int64
    Before  *time.Time
}

type MarkFilter struct {
    FolderID *int64
    FeedID   *int64

    Before *time.Time
}

type ItemList []Item

func (list ItemList) Len() int {
    return len(list)
}

func (list ItemList) SortKey(i int) string {
    return list[i].Date.Format(time.RFC3339) + "::" + list[i].GUID
}

func (list ItemList) Less(i, j int) bool {
    return list.SortKey(i) < list.SortKey(j)
}

func (list ItemList) Swap(i, j int) {
    list[i], list[j] = list[j], list[i]
}

func (s *Storage) CreateItems(items []Item) bool {
    tx, err := s.db.Begin()
    if err != nil {
@@ -75,16 +102,19 @@ func (s *Storage) CreateItems(items []Item) bool {
        return false
    }

    now := time.Now()
    now := time.Now().UTC()

    for _, item := range items {
    itemsSorted := ItemList(items)
    sort.Sort(itemsSorted)

    for _, item := range itemsSorted {
        _, err = tx.Exec(`
            insert into items (
                guid, feed_id, title, link, date,
                content, image, podcast_url,
                date_arrived, status
            )
            values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            values (?, ?, ?, ?, strftime('%Y-%m-%d %H:%M:%f', ?), ?, ?, ?, ?, ?)
            on conflict (feed_id, guid) do nothing`,
            item.GUID, item.FeedId, item.Title, item.Link, item.Date,
            item.Content, item.ImageURL, item.AudioURL,
@@ -106,7 +136,7 @@ func (s *Storage) CreateItems(items []Item) bool {
    return true
}

func listQueryPredicate(filter ItemFilter) (string, []interface{}) {
func listQueryPredicate(filter ItemFilter, newestFirst bool) (string, []interface{}) {
    cond := make([]string, 0)
    args := make([]interface{}, 0)
    if filter.FolderID != nil {
@@ -131,6 +161,36 @@ func listQueryPredicate(filter ItemFilter) (string, []interface{}) {
        cond = append(cond, "i.search_rowid in (select rowid from search where search match ?)")
        args = append(args, strings.Join(terms, " "))
    }
    if filter.After != nil {
        compare := ">"
        if newestFirst {
            compare = "<"
        }
        cond = append(cond, fmt.Sprintf("(i.date, i.id) %s (select date, id from items where id = ?)", compare))
        args = append(args, *filter.After)
    }
    if filter.IDs != nil && len(*filter.IDs) > 0 {
        qmarks := make([]string, len(*filter.IDs))
        idargs := make([]interface{}, len(*filter.IDs))
        for i, id := range *filter.IDs {
            qmarks[i] = "?"
            idargs[i] = id
        }
        cond = append(cond, "i.id in ("+strings.Join(qmarks, ",")+")")
        args = append(args, idargs...)
    }
    if filter.SinceID != nil {
        cond = append(cond, "i.id > ?")
        args = append(args, filter.SinceID)
    }
    if filter.MaxID != nil {
        cond = append(cond, "i.id < ?")
        args = append(args, filter.MaxID)
    }
    if filter.Before != nil {
        cond = append(cond, "i.date < ?")
        args = append(args, filter.Before)
    }
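The After filter above replaces offset-based paging with a keyset comparison on the (date, id) pair of the anchor item. A toy standalone illustration of the same row-value idea follows; the schema and values are made up, and it relies on SQLite's row-value support (available in the versions bundled with the vendored driver).

package main

import (
    "database/sql"
    "fmt"

    _ "github.com/mattn/go-sqlite3"
)

func main() {
    db, err := sql.Open("sqlite3", ":memory:")
    if err != nil {
        panic(err)
    }
    db.Exec(`create table items (id integer primary key, date text)`)
    for i := 1; i <= 6; i++ {
        db.Exec(`insert into items (date) values (?)`, fmt.Sprintf("2021-03-0%d", i))
    }

    // "next page" of a newest-first listing: everything strictly older than item 5,
    // expressed with the same row-value comparison the After filter builds
    rows, err := db.Query(`
        select id, date from items
        where (date, id) < (select date, id from items where id = ?)
        order by date desc, id desc
        limit 3`, 5)
    if err != nil {
        panic(err)
    }
    defer rows.Close()
    for rows.Next() {
        var id int64
        var date string
        rows.Scan(&id, &date)
        fmt.Println(id, date) // prints ids 4, 3, 2
    }
}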
    predicate := "1"
    if len(cond) > 0 {
@@ -140,25 +200,51 @@ func listQueryPredicate(filter ItemFilter) (string, []interface{}) {
    return predicate, args
}

func (s *Storage) ListItems(filter ItemFilter, offset, limit int, newestFirst bool) []Item {
    predicate, args := listQueryPredicate(filter)
func (s *Storage) CountItems(filter ItemFilter) int {
    predicate, args := listQueryPredicate(filter, false)

    var count int
    query := fmt.Sprintf(`
        select count(*)
        from items
        where %s
    `, predicate)
    err := s.db.QueryRow(query, args...).Scan(&count)
    if err != nil {
        log.Print(err)
        return 0
    }
    return count
}

func (s *Storage) ListItems(filter ItemFilter, limit int, newestFirst bool, withContent bool) []Item {
    predicate, args := listQueryPredicate(filter, newestFirst)
    result := make([]Item, 0, 0)

    order := "date desc"
    order := "date desc, id desc"
    if !newestFirst {
        order = "date asc"
        order = "date asc, id asc"
    }
    if filter.IDs != nil || filter.SinceID != nil {
        order = "i.id asc"
    }
    if filter.MaxID != nil {
        order = "i.id desc"
    }

    selectCols := "i.id, i.guid, i.feed_id, i.title, i.link, i.date, i.status, i.image, i.podcast_url"
    if withContent {
        selectCols += ", i.content"
    } else {
        selectCols += ", '' as content"
    }
    query := fmt.Sprintf(`
        select
            i.id, i.guid, i.feed_id,
            i.title, i.link, i.date,
            i.status, i.image, i.podcast_url
        select %s
        from items i
        where %s
        order by %s
        limit %d offset %d
    `, predicate, order, limit, offset)
        limit %d
    `, selectCols, predicate, order, limit)
    rows, err := s.db.Query(query, args...)
    if err != nil {
        log.Print(err)
@@ -169,7 +255,7 @@ func (s *Storage) ListItems(filter ItemFilter, offset, limit int, newestFirst bo
        err = rows.Scan(
            &x.Id, &x.GUID, &x.FeedId,
            &x.Title, &x.Link, &x.Date,
            &x.Status, &x.ImageURL, &x.AudioURL,
            &x.Status, &x.ImageURL, &x.AudioURL, &x.Content,
        )
        if err != nil {
            log.Print(err)
@@ -199,28 +285,17 @@ func (s *Storage) GetItem(id int64) *Item {
    return i
}

func (s *Storage) CountItems(filter ItemFilter) int64 {
    predicate, args := listQueryPredicate(filter)
    query := fmt.Sprintf(`
        select count(i.id)
        from items i
        where %s`, predicate)
    row := s.db.QueryRow(query, args...)
    if row != nil {
        var result int64
        row.Scan(&result)
        return result
    }
    return 0
}

func (s *Storage) UpdateItemStatus(item_id int64, status ItemStatus) bool {
    _, err := s.db.Exec(`update items set status = ? where id = ?`, status, item_id)
    return err == nil
}

func (s *Storage) MarkItemsRead(filter MarkFilter) bool {
    predicate, args := listQueryPredicate(ItemFilter{FolderID: filter.FolderID, FeedID: filter.FeedID})
    predicate, args := listQueryPredicate(ItemFilter{
        FolderID: filter.FolderID,
        FeedID:   filter.FeedID,
        Before:   filter.Before,
    }, false)
    query := fmt.Sprintf(`
        update items as i set status = %d
        where %s and i.status != %d
@@ -298,45 +373,70 @@ func (s *Storage) SyncSearch() {
    }
}

var (
    itemsKeepSize = 50
    itemsKeepDays = 90
)

// Delete old articles from the database to cleanup space.
//
// The rules:
// - Never delete starred entries.
// - Keep at least the same amount of articles the feed provides (default: 50).
//   This prevents from deleting items for rarely updated and/or ever-growing
//   feeds which might eventually reappear as unread.
// - Keep entries for a certain period (default: 90 days).
func (s *Storage) DeleteOldItems() {
    rows, err := s.db.Query(fmt.Sprintf(`
        select feed_id, count(*) as num_items
        from items
        where status != %d
        group by feed_id
        having num_items > 50
    `, STARRED))
    rows, err := s.db.Query(`
        select
            i.feed_id,
            max(coalesce(s.size, 0), ?) as max_items,
            count(*) as num_items
        from items i
        left outer join feed_sizes s on s.feed_id = i.feed_id
        where status != ?
        group by i.feed_id
    `, itemsKeepSize, STARRED)

    if err != nil {
        log.Print(err)
        return
    }

    feedIds := make([]int64, 0)
    feedLimits := make(map[int64]int64, 0)
    for rows.Next() {
        var id int64
        rows.Scan(&id, nil)
        feedIds = append(feedIds, id)
        var feedId, limit int64
        rows.Scan(&feedId, &limit, nil)
        feedLimits[feedId] = limit
    }

    for _, feedId := range feedIds {
    for feedId, limit := range feedLimits {
        result, err := s.db.Exec(`
            delete from items where feed_id = ? and status != ? and date_arrived < ?`,
            delete from items
            where id in (
                select i.id
                from items i
                where i.feed_id = ? and status != ?
                order by date desc
                limit -1 offset ?
            ) and date_arrived < ?
            `,
            feedId,
            STARRED,
            time.Now().Add(-time.Hour*24*90), // 90 days
            limit,
            time.Now().UTC().Add(-time.Hour*time.Duration(24*itemsKeepDays)),
        )
        if err != nil {
            log.Print(err)
            return
        }
        num, err := result.RowsAffected()
        numDeleted, err := result.RowsAffected()
        if err != nil {
            log.Print(err)
            return
        }
        if num > 0 {
            log.Printf("Deleted %d old items (%d)", num, feedId)
        if numDeleted > 0 {
            log.Printf("Deleted %d old items (feed: %d)", numDeleted, feedId)
        }
    }
}
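One non-obvious detail in the delete above is the "limit -1 offset ?" subquery: SQLite treats a negative LIMIT as "no limit", so the subquery skips the newest rows up to the per-feed limit and yields only the overflow. A tiny illustration on a throwaway table, under the same driver assumption as before:

package main

import (
    "database/sql"
    "fmt"

    _ "github.com/mattn/go-sqlite3"
)

func main() {
    db, _ := sql.Open("sqlite3", ":memory:")
    db.Exec(`create table t (id integer primary key)`)
    for i := 0; i < 7; i++ {
        db.Exec(`insert into t default values`)
    }

    // keep the 5 newest rows, select everything past them (the deletion candidates)
    rows, err := db.Query(`select id from t order by id desc limit -1 offset 5`)
    if err != nil {
        panic(err)
    }
    defer rows.Close()
    for rows.Next() {
        var id int64
        rows.Scan(&id)
        fmt.Println(id) // prints 2, then 1
    }
}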
@@ -1,7 +1,9 @@
package storage

import (
    "log"
    "reflect"
    "strconv"
    "testing"
    "time"
)
@@ -44,18 +46,18 @@ func testItemsSetup(db *Storage) testItemScope {
    db.CreateItems([]Item{
        // feed11
        {GUID: "item111", FeedId: feed11.Id, Title: "title111", Date: now.Add(time.Hour * 24 * 1)},
        {GUID: "item112", FeedId: feed11.Id, Title: "title112", Date: now.Add(time.Hour * 24 * 2)},
        {GUID: "item113", FeedId: feed11.Id, Title: "title113", Date: now.Add(time.Hour * 24 * 3)},
        {GUID: "item112", FeedId: feed11.Id, Title: "title112", Date: now.Add(time.Hour * 24 * 2)}, // read
        {GUID: "item113", FeedId: feed11.Id, Title: "title113", Date: now.Add(time.Hour * 24 * 3)}, // starred
        // feed12
        {GUID: "item121", FeedId: feed12.Id, Title: "title121", Date: now.Add(time.Hour * 24 * 4)},
        {GUID: "item122", FeedId: feed12.Id, Title: "title122", Date: now.Add(time.Hour * 24 * 5)},
        {GUID: "item122", FeedId: feed12.Id, Title: "title122", Date: now.Add(time.Hour * 24 * 5)}, // read
        // feed21
        {GUID: "item211", FeedId: feed21.Id, Title: "title211", Date: now.Add(time.Hour * 24 * 6)},
        {GUID: "item212", FeedId: feed21.Id, Title: "title212", Date: now.Add(time.Hour * 24 * 7)},
        {GUID: "item211", FeedId: feed21.Id, Title: "title211", Date: now.Add(time.Hour * 24 * 6)}, // read
        {GUID: "item212", FeedId: feed21.Id, Title: "title212", Date: now.Add(time.Hour * 24 * 7)}, // starred
        // feed01
        {GUID: "item011", FeedId: feed01.Id, Title: "title011", Date: now.Add(time.Hour * 24 * 8)},
        {GUID: "item012", FeedId: feed01.Id, Title: "title012", Date: now.Add(time.Hour * 24 * 9)},
        {GUID: "item013", FeedId: feed01.Id, Title: "title013", Date: now.Add(time.Hour * 24 * 10)},
        {GUID: "item012", FeedId: feed01.Id, Title: "title012", Date: now.Add(time.Hour * 24 * 9)},  // read
        {GUID: "item013", FeedId: feed01.Id, Title: "title013", Date: now.Add(time.Hour * 24 * 10)}, // starred
    })
    db.db.Exec(`update items set status = ? where guid in ("item112", "item122", "item211", "item012")`, READ)
    db.db.Exec(`update items set status = ? where guid in ("item113", "item212", "item013")`, STARRED)
@@ -70,7 +72,25 @@ func testItemsSetup(db *Storage) testItemScope {
    }
}

func testItemGuids(items []Item) []string {
func getItem(db *Storage, guid string) *Item {
    i := &Item{}
    err := db.db.QueryRow(`
        select
            i.id, i.guid, i.feed_id, i.title, i.link, i.content,
            i.date, i.status, i.image, i.podcast_url
        from items i
        where i.guid = ?
    `, guid).Scan(
        &i.Id, &i.GUID, &i.FeedId, &i.Title, &i.Link, &i.Content,
        &i.Date, &i.Status, &i.ImageURL, &i.AudioURL,
    )
    if err != nil {
        log.Fatal(err)
    }
    return i
}

func getItemGuids(items []Item) []string {
    guids := make([]string, 0)
    for _, item := range items {
        guids = append(guids, item.GUID)
@@ -84,7 +104,7 @@ func TestListItems(t *testing.T) {

    // filter by folder_id

    have := testItemGuids(db.ListItems(ItemFilter{FolderID: &scope.folder1.Id}, 0, 10, false))
    have := getItemGuids(db.ListItems(ItemFilter{FolderID: &scope.folder1.Id}, 10, false, false))
    want := []string{"item111", "item112", "item113", "item121", "item122"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
@@ -92,7 +112,7 @@ func TestListItems(t *testing.T) {
        t.Fail()
    }

    have = testItemGuids(db.ListItems(ItemFilter{FolderID: &scope.folder2.Id}, 0, 10, false))
    have = getItemGuids(db.ListItems(ItemFilter{FolderID: &scope.folder2.Id}, 10, false, false))
    want = []string{"item211", "item212"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
@@ -102,7 +122,7 @@ func TestListItems(t *testing.T) {

    // filter by feed_id

    have = testItemGuids(db.ListItems(ItemFilter{FeedID: &scope.feed11.Id}, 0, 10, false))
    have = getItemGuids(db.ListItems(ItemFilter{FeedID: &scope.feed11.Id}, 10, false, false))
    want = []string{"item111", "item112", "item113"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
@@ -110,7 +130,7 @@ func TestListItems(t *testing.T) {
        t.Fail()
    }

    have = testItemGuids(db.ListItems(ItemFilter{FeedID: &scope.feed01.Id}, 0, 10, false))
    have = getItemGuids(db.ListItems(ItemFilter{FeedID: &scope.feed01.Id}, 10, false, false))
    want = []string{"item011", "item012", "item013"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
@@ -121,7 +141,7 @@ func TestListItems(t *testing.T) {
    // filter by status

    var starred ItemStatus = STARRED
    have = testItemGuids(db.ListItems(ItemFilter{Status: &starred}, 0, 10, false))
    have = getItemGuids(db.ListItems(ItemFilter{Status: &starred}, 10, false, false))
    want = []string{"item113", "item212", "item013"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
@@ -130,7 +150,7 @@ func TestListItems(t *testing.T) {
    }

    var unread ItemStatus = UNREAD
    have = testItemGuids(db.ListItems(ItemFilter{Status: &unread}, 0, 10, false))
    have = getItemGuids(db.ListItems(ItemFilter{Status: &unread}, 10, false, false))
    want = []string{"item111", "item121", "item011"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
@@ -138,9 +158,9 @@ func TestListItems(t *testing.T) {
        t.Fail()
    }

    // filter by offset,limit
    // limit

    have = testItemGuids(db.ListItems(ItemFilter{}, 0, 2, false))
    have = getItemGuids(db.ListItems(ItemFilter{}, 2, false, false))
    want = []string{"item111", "item112"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
@@ -148,18 +168,10 @@ func TestListItems(t *testing.T) {
        t.Fail()
    }

    have = testItemGuids(db.ListItems(ItemFilter{}, 2, 3, false))
    want = []string{"item113", "item121", "item122"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fail()
    }

    // filter by search
    db.SyncSearch()
    search1 := "title111"
    have = testItemGuids(db.ListItems(ItemFilter{Search: &search1}, 0, 4, true))
    have = getItemGuids(db.ListItems(ItemFilter{Search: &search1}, 4, true, false))
    want = []string{"item111"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
@@ -168,7 +180,7 @@ func TestListItems(t *testing.T) {
    }

    // sort by date
    have = testItemGuids(db.ListItems(ItemFilter{}, 0, 4, true))
    have = getItemGuids(db.ListItems(ItemFilter{}, 4, true, false))
    want = []string{"item013", "item012", "item011", "item212"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
@@ -177,72 +189,37 @@ func TestListItems(t *testing.T) {
    }
}

func TestCountItems(t *testing.T) {
func TestListItemsPaginated(t *testing.T) {
    db := testDB()
    scope := testItemsSetup(db)
    testItemsSetup(db)

    have := db.CountItems(ItemFilter{})
    want := int64(10)
    if have != want {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fail()
    }

    // folders
    item012 := getItem(db, "item012")
    item121 := getItem(db, "item121")

    have = db.CountItems(ItemFilter{FolderID: &scope.folder1.Id})
    want = int64(5)
    if have != want {
    // all, newest first
    have := getItemGuids(db.ListItems(ItemFilter{After: &item012.Id}, 3, true, false))
    want := []string{"item011", "item212", "item211"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fail()
    }

    have = db.CountItems(ItemFilter{FolderID: &scope.folder2.Id})
    want = int64(2)
    if have != want {
    // unread, newest first
    unread := UNREAD
    have = getItemGuids(db.ListItems(ItemFilter{After: &item012.Id, Status: &unread}, 3, true, false))
    want = []string{"item011", "item121", "item111"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fail()
    }

    // feeds

    have = db.CountItems(ItemFilter{FeedID: &scope.feed21.Id})
    want = int64(2)
    if have != want {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fail()
    }

    have = db.CountItems(ItemFilter{FeedID: &scope.feed01.Id})
    want = int64(3)
    if have != want {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fail()
    }

    // statuses

    var unread ItemStatus = UNREAD
    have = db.CountItems(ItemFilter{Status: &unread})
    want = int64(3)
    if have != want {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fail()
    }

    // search

    db.SyncSearch()
    search := "title0"
    have = db.CountItems(ItemFilter{Search: &search})
    want = int64(3)
    if have != want {
    // starred, oldest first
    starred := STARRED
    have = getItemGuids(db.ListItems(ItemFilter{After: &item121.Id, Status: &starred}, 3, false, false))
    want = []string{"item212", "item013"}
    if !reflect.DeepEqual(have, want) {
        t.Logf("want: %#v", want)
        t.Logf("have: %#v", have)
        t.Fail()
@@ -256,7 +233,7 @@ func TestMarkItemsRead(t *testing.T) {
    db1 := testDB()
    testItemsSetup(db1)
    db1.MarkItemsRead(MarkFilter{})
    have := testItemGuids(db1.ListItems(ItemFilter{Status: &read}, 0, 10, false))
    have := getItemGuids(db1.ListItems(ItemFilter{Status: &read}, 10, false, false))
    want := []string{
        "item111", "item112", "item121", "item122",
        "item211", "item011", "item012",
@@ -270,7 +247,7 @@ func TestMarkItemsRead(t *testing.T) {
    db2 := testDB()
    scope2 := testItemsSetup(db2)
    db2.MarkItemsRead(MarkFilter{FolderID: &scope2.folder1.Id})
    have = testItemGuids(db2.ListItems(ItemFilter{Status: &read}, 0, 10, false))
    have = getItemGuids(db2.ListItems(ItemFilter{Status: &read}, 10, false, false))
    want = []string{
        "item111", "item112", "item121", "item122",
        "item211", "item012",
@@ -284,7 +261,7 @@ func TestMarkItemsRead(t *testing.T) {
    db3 := testDB()
    scope3 := testItemsSetup(db3)
    db3.MarkItemsRead(MarkFilter{FeedID: &scope3.feed11.Id})
    have = testItemGuids(db3.ListItems(ItemFilter{Status: &read}, 0, 10, false))
    have = getItemGuids(db3.ListItems(ItemFilter{Status: &read}, 10, false, false))
    want = []string{
        "item111", "item112", "item122",
        "item211", "item012",
@@ -295,3 +272,59 @@ func TestMarkItemsRead(t *testing.T) {
        t.Fail()
    }
}

func TestDeleteOldItems(t *testing.T) {
    extraItems := 10

    now := time.Now().UTC()
    db := testDB()
    feed := db.CreateFeed("feed", "", "", "http://test.com/feed11.xml", nil)

    items := make([]Item, 0)
    for i := 0; i < itemsKeepSize+extraItems; i++ {
        istr := strconv.Itoa(i)
        items = append(items, Item{
            GUID:   istr,
            FeedId: feed.Id,
            Title:  istr,
            Date:   now.Add(time.Hour * time.Duration(i)),
        })
    }
    db.CreateItems(items)

    db.SetFeedSize(feed.Id, itemsKeepSize)
    var feedSize int
    err := db.db.QueryRow(
        `select size from feed_sizes where feed_id = ?`, feed.Id,
    ).Scan(&feedSize)
    if err != nil {
        t.Fatal(err)
    }
    if feedSize != itemsKeepSize {
        t.Fatalf(
            "expected feed size to get updated\nwant: %d\nhave: %d",
            itemsKeepSize+extraItems,
            feedSize,
        )
    }

    // expire only the first 3 articles
    _, err = db.db.Exec(
        `update items set date_arrived = ?
        where id in (select id from items limit 3)`,
        now.Add(-time.Hour*time.Duration(itemsKeepDays*24)),
    )
    if err != nil {
        t.Fatal(err)
    }

    db.DeleteOldItems()
    feedItems := db.ListItems(ItemFilter{FeedID: &feed.Id}, 1000, false, false)
    if len(feedItems) != len(items)-3 {
        t.Fatalf(
            "invalid number of old items kept\nwant: %d\nhave: %d",
            len(items)-3,
            len(feedItems),
        )
    }
}
@@ -4,6 +4,7 @@ import (
    "database/sql"
    "fmt"
    "log"
    "time"
)

var migrations = []func(*sql.Tx) error{
@@ -13,6 +14,8 @@ var migrations = []func(*sql.Tx) error{
    m04_item_podcasturl,
    m05_move_description_to_content,
    m06_fill_missing_dates,
    m07_add_feed_size,
    m08_normalize_datetime,
}

var maxVersion = int64(len(migrations))
@@ -259,3 +262,35 @@ func m06_fill_missing_dates(tx *sql.Tx) error {
    _, err := tx.Exec(sql)
    return err
}

func m07_add_feed_size(tx *sql.Tx) error {
    sql := `
        create table if not exists feed_sizes (
            feed_id references feeds(id) on delete cascade unique,
            size integer not null default 0
        );
    `
    _, err := tx.Exec(sql)
    return err
}

func m08_normalize_datetime(tx *sql.Tx) error {
    rows, err := tx.Query(`select id, date_arrived from items;`)
    if err != nil {
        return err
    }
    for rows.Next() {
        var id int64
        var dateArrived time.Time
        err = rows.Scan(&id, &dateArrived)
        if err != nil {
            return err
        }
        _, err = tx.Exec(`update items set date_arrived = ? where id = ?;`, dateArrived.UTC(), id)
        if err != nil {
            return err
        }
    }
    _, err = tx.Exec(`update items set date = strftime('%Y-%m-%d %H:%M:%f', date);`)
    return err
}
@@ -15,6 +15,7 @@ func New(path string) (*Storage, error) {
        return nil, err
    }

    // TODO: https://foxcpp.dev/articles/the-right-way-to-use-go-sqlite3
    db.SetMaxOpenConns(1)

    if err = migrate(db); err != nil {

@@ -11,6 +11,7 @@ func testDB() *Storage {
    log.SetOutput(io.Discard)
    db, _ := New(":memory:")
    log.SetOutput(os.Stderr)
    log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile)
    return db
}
@@ -9,3 +9,4 @@ hash:
changes:

- removed `getlantern/golog` dependency
- prevented the package from compiling on Linux
@@ -1,3 +1,5 @@
// +build darwin windows

/*
Package systray is a cross-platform Go library to place an icon and menu in the notification area.
*/

@@ -1,3 +1,5 @@
// +build never

package systray

/*

@@ -1,9 +1,8 @@
// +build !windows
// +build darwin

package systray

/*
#cgo linux pkg-config: gtk+-3.0 appindicator3-0.1
#cgo darwin CFLAGS: -DDARWIN -x objective-c -fobjc-arc
#cgo darwin LDFLAGS: -framework Cocoa
@@ -4,10 +4,12 @@ import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"mime"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
"strings"
|
||||
|
||||
"github.com/nkanaev/yarr/src/content/scraper"
|
||||
"github.com/nkanaev/yarr/src/parser"
|
||||
@@ -37,29 +39,32 @@ func DiscoverFeed(candidateUrl string) (*DiscoverResult, error) {
|
||||
if res.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("status code %d", res.StatusCode)
|
||||
}
|
||||
cs := getCharset(res)
|
||||
|
||||
body, err := charset.NewReader(res.Body, res.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
content, err := ioutil.ReadAll(body)
|
||||
body, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Try to feed into parser
|
||||
feed, err := parser.Parse(bytes.NewReader(content))
|
||||
feed, err := parser.ParseAndFix(bytes.NewReader(body), candidateUrl, cs)
|
||||
if err == nil {
|
||||
feed.TranslateURLs(candidateUrl)
|
||||
feed.SetMissingDatesTo(time.Now())
|
||||
result.Feed = feed
|
||||
result.FeedLink = candidateUrl
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// Possibly an html link. Search for feed links
|
||||
content := string(body)
|
||||
if cs != "" {
|
||||
if r, err := charset.NewReaderLabel(cs, bytes.NewReader(body)); err == nil {
|
||||
if body, err := io.ReadAll(r); err == nil {
|
||||
content = string(body)
|
||||
}
|
||||
}
|
||||
}
|
||||
sources := make([]FeedSource, 0)
|
||||
for url, title := range scraper.FindFeeds(string(content), candidateUrl) {
|
||||
for url, title := range scraper.FindFeeds(content, candidateUrl) {
|
||||
sources = append(sources, FeedSource{Title: title, Url: url})
|
||||
}
|
||||
switch {
|
||||
@@ -76,8 +81,16 @@ func DiscoverFeed(candidateUrl string) (*DiscoverResult, error) {
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func findFavicon(websiteUrl, feedUrl string) (*[]byte, error) {
|
||||
candidateUrls := make([]string, 0)
|
||||
var emptyIcon = make([]byte, 0)
|
||||
var imageTypes = map[string]bool{
|
||||
"image/x-icon": true,
|
||||
"image/png": true,
|
||||
"image/jpeg": true,
|
||||
"image/gif": true,
|
||||
}
|
||||
|
||||
func findFavicon(siteUrl, feedUrl string) (*[]byte, error) {
|
||||
urls := make([]string, 0)
|
||||
|
||||
favicon := func(link string) string {
|
||||
u, err := url.Parse(link)
|
||||
@@ -87,49 +100,43 @@ func findFavicon(websiteUrl, feedUrl string) (*[]byte, error) {
|
||||
return fmt.Sprintf("%s://%s/favicon.ico", u.Scheme, u.Host)
|
||||
}
|
||||
|
||||
if len(websiteUrl) != 0 {
|
||||
res, err := client.get(websiteUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
body, err := ioutil.ReadAll(res.Body)
|
||||
defer res.Body.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
candidateUrls = append(candidateUrls, scraper.FindIcons(string(body), websiteUrl)...)
|
||||
if c := favicon(websiteUrl); len(c) != 0 {
|
||||
candidateUrls = append(candidateUrls, c)
|
||||
}
|
||||
}
|
||||
if c := favicon(feedUrl); len(c) != 0 {
|
||||
candidateUrls = append(candidateUrls, c)
|
||||
}
|
||||
|
||||
imageTypes := [4]string{
|
||||
"image/x-icon",
|
||||
"image/png",
|
||||
"image/jpeg",
|
||||
"image/gif",
|
||||
}
|
||||
for _, url := range candidateUrls {
|
||||
res, err := client.get(url)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode == 200 {
|
||||
if content, err := ioutil.ReadAll(res.Body); err == nil {
|
||||
ctype := http.DetectContentType(content)
|
||||
for _, itype := range imageTypes {
|
||||
if ctype == itype {
|
||||
return &content, nil
|
||||
}
|
||||
if siteUrl != "" {
|
||||
if res, err := client.get(siteUrl); err == nil {
|
||||
defer res.Body.Close()
|
||||
if body, err := ioutil.ReadAll(res.Body); err == nil {
|
||||
urls = append(urls, scraper.FindIcons(string(body), siteUrl)...)
|
||||
if c := favicon(siteUrl); c != "" {
|
||||
urls = append(urls, c)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
|
||||
if c := favicon(feedUrl); c != "" {
|
||||
urls = append(urls, c)
|
||||
}
|
||||
|
||||
for _, u := range urls {
|
||||
res, err := client.get(u)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != 200 {
|
||||
continue
|
||||
}
|
||||
|
||||
content, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
ctype := http.DetectContentType(content)
|
||||
if imageTypes[ctype] {
|
||||
return &content, nil
|
||||
}
|
||||
}
|
||||
return &emptyIcon, nil
|
||||
}
|
||||
|
||||
func ConvertItems(items []parser.Item, feed storage.Feed) []storage.Item {
|
||||
@@ -183,12 +190,7 @@ func listItems(f storage.Feed, db *storage.Storage) ([]storage.Item, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
body, err := charset.NewReader(res.Body, res.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
feed, err := parser.Parse(body)
|
||||
feed, err := parser.ParseAndFix(res.Body, f.FeedLink, getCharset(res))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -198,7 +200,42 @@ func listItems(f storage.Feed, db *storage.Storage) ([]storage.Item, error) {
|
||||
if lmod != "" || etag != "" {
|
||||
db.SetHTTPState(f.Id, lmod, etag)
|
||||
}
|
||||
feed.TranslateURLs(f.FeedLink)
|
||||
feed.SetMissingDatesTo(time.Now())
|
||||
return ConvertItems(feed.Items, f), nil
|
||||
}
|
||||
|
||||
func getCharset(res *http.Response) string {
|
||||
contentType := res.Header.Get("Content-Type")
|
||||
if _, params, err := mime.ParseMediaType(contentType); err == nil {
|
||||
if cs, ok := params["charset"]; ok {
|
||||
if e, _ := charset.Lookup(cs); e != nil {
|
||||
return cs
|
||||
}
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func GetBody(url string) (string, error) {
|
||||
res, err := client.get(url)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
var r io.Reader
|
||||
|
||||
ctype := res.Header.Get("Content-Type")
|
||||
if strings.Contains(ctype, "charset") {
|
||||
r, err = charset.NewReader(res.Body, ctype)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
} else {
|
||||
r = res.Body
|
||||
}
|
||||
body, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(body), nil
|
||||
}
|
||||
|
@@ -9,6 +9,8 @@ import (
 	"github.com/nkanaev/yarr/src/storage"
 )
 
+const NUM_WORKERS = 4
+
 type Worker struct {
 	db      *storage.Storage
 	pending *int32
@@ -39,10 +41,8 @@ func (w *Worker) StartFeedCleaner() {
 
 func (w *Worker) FindFavicons() {
 	go func() {
-		for _, feed := range w.db.ListFeeds() {
-			if !feed.HasIcon {
-				w.FindFeedFavicon(feed)
-			}
+		for _, feed := range w.db.ListFeedsMissingIcons() {
+			w.FindFeedFavicon(feed)
 		}
 	}()
 }
@@ -88,45 +88,51 @@ func (w *Worker) SetRefreshRate(minute int64) {
 }
 
 func (w *Worker) RefreshFeeds() {
-	log.Print("Refreshing feeds")
-	go w.refresher()
-}
-
-func (w *Worker) refresher() {
-	w.reflock.Lock()
-	defer w.reflock.Unlock()
-
-	w.db.ResetFeedErrors()
-
-	feeds := w.db.ListFeeds()
-	if len(feeds) == 0 {
+	if *w.pending > 0 {
+		log.Print("Refreshing already in progress")
+		return
+	}
+
+	feeds := w.db.ListFeeds()
+	if len(feeds) == 0 {
 		log.Print("Nothing to refresh")
 		return
 	}
 
+	log.Print("Refreshing feeds")
+	atomic.StoreInt32(w.pending, int32(len(feeds)))
+	go w.refresher(feeds)
+}
+
+func (w *Worker) refresher(feeds []storage.Feed) {
+	w.db.ResetFeedErrors()
+
 	srcqueue := make(chan storage.Feed, len(feeds))
 	dstqueue := make(chan []storage.Item)
 
-	// hardcoded to 4 workers ;)
-	go w.worker(srcqueue, dstqueue)
-	go w.worker(srcqueue, dstqueue)
-	go w.worker(srcqueue, dstqueue)
-	go w.worker(srcqueue, dstqueue)
+	for i := 0; i < NUM_WORKERS; i++ {
+		go w.worker(srcqueue, dstqueue)
+	}
 
 	for _, feed := range feeds {
 		srcqueue <- feed
 	}
 	for i := 0; i < len(feeds); i++ {
-		w.db.CreateItems(<-dstqueue)
+		items := <-dstqueue
+		if len(items) > 0 {
+			w.db.CreateItems(items)
+			w.db.SetFeedSize(items[0].FeedId, len(items))
+		}
+		atomic.AddInt32(w.pending, -1)
+		w.db.SyncSearch()
 	}
 	close(srcqueue)
 	close(dstqueue)
 
-	w.db.SyncSearch()
 	log.Printf("Finished refreshing %d feeds", len(feeds))
-
-	w.reflock.Unlock()
 }
 
 func (w *Worker) worker(srcqueue <-chan storage.Feed, dstqueue chan<- []storage.Item) {

4
vendor/github.com/mattn/go-sqlite3/.codecov.yml
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
coverage:
|
||||
status:
|
||||
project: off
|
||||
patch: off
|
14
vendor/github.com/mattn/go-sqlite3/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
*.db
|
||||
*.exe
|
||||
*.dll
|
||||
*.o
|
||||
|
||||
# VSCode
|
||||
.vscode
|
||||
|
||||
# Exclude from upgrade
|
||||
upgrade/*.c
|
||||
upgrade/*.h
|
||||
|
||||
# Exclude upgrade binary
|
||||
upgrade/upgrade
|
21
vendor/github.com/mattn/go-sqlite3/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Yasuhiro Matsumoto
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
592
vendor/github.com/mattn/go-sqlite3/README.md
generated
vendored
Normal file
@@ -0,0 +1,592 @@
|
||||
go-sqlite3
|
||||
==========
|
||||
|
||||
[](http://godoc.org/github.com/mattn/go-sqlite3)
|
||||
[](https://github.com/mattn/go-sqlite3/actions?query=workflow%3AGo)
|
||||
[](https://opencollective.com/mattn-go-sqlite3)
|
||||
[](https://codecov.io/gh/mattn/go-sqlite3)
|
||||
[](https://goreportcard.com/report/github.com/mattn/go-sqlite3)
|
||||
|
||||
Latest stable version is v1.14 or later, not v2.
|
||||
|
||||
~~**NOTE:** The increase to v2 was an accident. There were no major changes or features.~~
|
||||
|
||||
# Description
|
||||
|
||||
sqlite3 driver conforming to the built-in database/sql interface
|
||||
|
||||
Supported Golang version: See [.github/workflows/go.yaml](./.github/workflows/go.yaml)
|
||||
|
||||
[This package follows the official Golang Release Policy.](https://golang.org/doc/devel/release.html#policy)
|
||||
|
||||
### Overview
|
||||
|
||||
- [go-sqlite3](#go-sqlite3)
|
||||
- [Description](#description)
|
||||
- [Overview](#overview)
|
||||
- [Installation](#installation)
|
||||
- [API Reference](#api-reference)
|
||||
- [Connection String](#connection-string)
|
||||
- [DSN Examples](#dsn-examples)
|
||||
- [Features](#features)
|
||||
- [Usage](#usage)
|
||||
- [Feature / Extension List](#feature--extension-list)
|
||||
- [Compilation](#compilation)
|
||||
- [Android](#android)
|
||||
- [ARM](#arm)
|
||||
- [Cross Compile](#cross-compile)
|
||||
- [Google Cloud Platform](#google-cloud-platform)
|
||||
- [Linux](#linux)
|
||||
- [Alpine](#alpine)
|
||||
- [Fedora](#fedora)
|
||||
- [Ubuntu](#ubuntu)
|
||||
- [Mac OSX](#mac-osx)
|
||||
- [Windows](#windows)
|
||||
- [Errors](#errors)
|
||||
- [User Authentication](#user-authentication)
|
||||
- [Compile](#compile)
|
||||
- [Usage](#usage-1)
|
||||
- [Create protected database](#create-protected-database)
|
||||
- [Password Encoding](#password-encoding)
|
||||
- [Available Encoders](#available-encoders)
|
||||
- [Restrictions](#restrictions)
|
||||
- [Support](#support)
|
||||
- [User Management](#user-management)
|
||||
- [SQL](#sql)
|
||||
- [Examples](#examples)
|
||||
- [*SQLiteConn](#sqliteconn)
|
||||
- [Attached database](#attached-database)
|
||||
- [Extensions](#extensions)
|
||||
- [Spatialite](#spatialite)
|
||||
- [FAQ](#faq)
|
||||
- [License](#license)
|
||||
- [Author](#author)
|
||||
|
||||
# Installation
|
||||
|
||||
This package can be installed with the go get command:
|
||||
|
||||
go get github.com/mattn/go-sqlite3
|
||||
|
||||
_go-sqlite3_ is a *cgo* package.
|
||||
If you want to build your app using go-sqlite3, you need gcc.
|
||||
However, after you have built and installed _go-sqlite3_ with `go install github.com/mattn/go-sqlite3` (which requires gcc), you can build your app without relying on gcc in future.
|
||||
|
||||
***Important: because this is a `CGO` enabled package, you are required to set the environment variable `CGO_ENABLED=1` and have a `gcc` compiler present within your path.***
|
||||
|
||||
# API Reference
|
||||
|
||||
API documentation can be found here: http://godoc.org/github.com/mattn/go-sqlite3
|
||||
|
||||
Examples can be found under the [examples](./_example) directory
|
||||
|
||||
# Connection String
|
||||
|
||||
When creating a new SQLite database or connecting to an existing one, additional options can be given along with the file name.
This is also known as a DSN (Data Source Name) string.

Options are appended after the filename of the SQLite database.
The database filename and the options are separated by a `?` (question mark).
Options should be URL-encoded (see [url.QueryEscape](https://golang.org/pkg/net/url/#QueryEscape)).
|
||||
|
||||
This also applies when using an in-memory database instead of a file.
|
||||
|
||||
Options can be given using the following format: `KEYWORD=VALUE` and multiple options can be combined with the `&` ampersand.
|
||||
|
||||
This library supports dsn options of SQLite itself and provides additional options.
|
||||
|
||||
Boolean values can be one of:
|
||||
* `0` `no` `false` `off`
|
||||
* `1` `yes` `true` `on`
|
||||
|
||||
| Name | Key | Value(s) | Description |
|
||||
|------|-----|----------|-------------|
|
||||
| UA - Create | `_auth` | - | Create User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| UA - Username | `_auth_user` | `string` | Username for User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| UA - Password | `_auth_pass` | `string` | Password for User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| UA - Crypt | `_auth_crypt` | <ul><li>SHA1</li><li>SSHA1</li><li>SHA256</li><li>SSHA256</li><li>SHA384</li><li>SSHA384</li><li>SHA512</li><li>SSHA512</li></ul> | Password encoder to use for User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| UA - Salt | `_auth_salt` | `string` | Salt to use if the configure password encoder requires a salt, for User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| Auto Vacuum | `_auto_vacuum` \| `_vacuum` | <ul><li>`0` \| `none`</li><li>`1` \| `full`</li><li>`2` \| `incremental`</li></ul> | For more information see [PRAGMA auto_vacuum](https://www.sqlite.org/pragma.html#pragma_auto_vacuum) |
|
||||
| Busy Timeout | `_busy_timeout` \| `_timeout` | `int` | Specify value for sqlite3_busy_timeout. For more information see [PRAGMA busy_timeout](https://www.sqlite.org/pragma.html#pragma_busy_timeout) |
|
||||
| Case Sensitive LIKE | `_case_sensitive_like` \| `_cslike` | `boolean` | For more information see [PRAGMA case_sensitive_like](https://www.sqlite.org/pragma.html#pragma_case_sensitive_like) |
|
||||
| Defer Foreign Keys | `_defer_foreign_keys` \| `_defer_fk` | `boolean` | For more information see [PRAGMA defer_foreign_keys](https://www.sqlite.org/pragma.html#pragma_defer_foreign_keys) |
|
||||
| Foreign Keys | `_foreign_keys` \| `_fk` | `boolean` | For more information see [PRAGMA foreign_keys](https://www.sqlite.org/pragma.html#pragma_foreign_keys) |
|
||||
| Ignore CHECK Constraints | `_ignore_check_constraints` | `boolean` | For more information see [PRAGMA ignore_check_constraints](https://www.sqlite.org/pragma.html#pragma_ignore_check_constraints) |
|
||||
| Immutable | `immutable` | `boolean` | For more information see [Immutable](https://www.sqlite.org/c3ref/open.html) |
|
||||
| Journal Mode | `_journal_mode` \| `_journal` | <ul><li>DELETE</li><li>TRUNCATE</li><li>PERSIST</li><li>MEMORY</li><li>WAL</li><li>OFF</li></ul> | For more information see [PRAGMA journal_mode](https://www.sqlite.org/pragma.html#pragma_journal_mode) |
|
||||
| Locking Mode | `_locking_mode` \| `_locking` | <ul><li>NORMAL</li><li>EXCLUSIVE</li></ul> | For more information see [PRAGMA locking_mode](https://www.sqlite.org/pragma.html#pragma_locking_mode) |
|
||||
| Mode | `mode` | <ul><li>ro</li><li>rw</li><li>rwc</li><li>memory</li></ul> | Access Mode of the database. For more information see [SQLite Open](https://www.sqlite.org/c3ref/open.html) |
|
||||
| Mutex Locking | `_mutex` | <ul><li>no</li><li>full</li></ul> | Specify mutex mode. |
|
||||
| Query Only | `_query_only` | `boolean` | For more information see [PRAGMA query_only](https://www.sqlite.org/pragma.html#pragma_query_only) |
|
||||
| Recursive Triggers | `_recursive_triggers` \| `_rt` | `boolean` | For more information see [PRAGMA recursive_triggers](https://www.sqlite.org/pragma.html#pragma_recursive_triggers) |
|
||||
| Secure Delete | `_secure_delete` | `boolean` \| `FAST` | For more information see [PRAGMA secure_delete](https://www.sqlite.org/pragma.html#pragma_secure_delete) |
|
||||
| Shared-Cache Mode | `cache` | <ul><li>shared</li><li>private</li></ul> | Set cache mode for more information see [sqlite.org](https://www.sqlite.org/sharedcache.html) |
|
||||
| Synchronous | `_synchronous` \| `_sync` | <ul><li>0 \| OFF</li><li>1 \| NORMAL</li><li>2 \| FULL</li><li>3 \| EXTRA</li></ul> | For more information see [PRAGMA synchronous](https://www.sqlite.org/pragma.html#pragma_synchronous) |
|
||||
| Time Zone Location | `_loc` | auto | Specify location of time format. |
|
||||
| Transaction Lock | `_txlock` | <ul><li>immediate</li><li>deferred</li><li>exclusive</li></ul> | Specify locking behavior for transactions. |
|
||||
| Writable Schema | `_writable_schema` | `Boolean` | When this pragma is on, the SQLITE_MASTER table, in which the database schema is stored, can be changed using ordinary UPDATE, INSERT, and DELETE statements. Warning: misuse of this pragma can easily result in a corrupt database file. |
|
||||
| Cache Size | `_cache_size` | `int` | Maximum cache size; default is 2000K (2M). See [PRAGMA cache_size](https://sqlite.org/pragma.html#pragma_cache_size) |
|
||||
|
||||
|
||||
## DSN Examples
|
||||
|
||||
```
|
||||
file:test.db?cache=shared&mode=memory
|
||||
```
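
For readers coming from `database/sql`, here is a minimal, hedged sketch of how the DSN options above might be combined when opening a database. The file name `app.db` and the particular option values are illustrative; only the option keys come from the table above.

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3" // registers the "sqlite3" driver
)

func main() {
	// Illustrative DSN: WAL journal mode, a 5000 ms busy timeout and
	// foreign-key enforcement, all passed as URL-style options after "?".
	dsn := "file:app.db?_journal_mode=WAL&_busy_timeout=5000&_foreign_keys=on"

	db, err := sql.Open("sqlite3", dsn)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Ping forces a real connection, so DSN problems surface here.
	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
}
```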
|
||||
|
||||
# Features
|
||||
|
||||
This package allows additional configuration of features available within SQLite3 to be enabled or disabled using Go build constraints, also known as build `tags`.
|
||||
|
||||
[Click here for more information about build tags / constraints.](https://golang.org/pkg/go/build/#hdr-Build_Constraints)
|
||||
|
||||
### Usage
|
||||
|
||||
If you wish to build this library with additional extensions / features, use the following command.
|
||||
|
||||
```bash
|
||||
go build --tags "<FEATURE>"
|
||||
```
|
||||
|
||||
For available features see the extension list.
|
||||
When using multiple build tags, all the different tags should be space delimited.
|
||||
|
||||
Example:
|
||||
|
||||
```bash
|
||||
go build --tags "icu json1 fts5 secure_delete"
|
||||
```
|
||||
|
||||
### Feature / Extension List
|
||||
|
||||
| Extension | Build Tag | Description |
|
||||
|-----------|-----------|-------------|
|
||||
| Additional Statistics | sqlite_stat4 | This option adds additional logic to the ANALYZE command and to the query planner that can help SQLite to chose a better query plan under certain situations. The ANALYZE command is enhanced to collect histogram data from all columns of every index and store that data in the sqlite_stat4 table.<br><br>The query planner will then use the histogram data to help it make better index choices. The downside of this compile-time option is that it violates the query planner stability guarantee making it more difficult to ensure consistent performance in mass-produced applications.<br><br>SQLITE_ENABLE_STAT4 is an enhancement of SQLITE_ENABLE_STAT3. STAT3 only recorded histogram data for the left-most column of each index whereas the STAT4 enhancement records histogram data from all columns of each index.<br><br>The SQLITE_ENABLE_STAT3 compile-time option is a no-op and is ignored if the SQLITE_ENABLE_STAT4 compile-time option is used |
|
||||
| Allow URI Authority | sqlite_allow_uri_authority | URI filenames normally throws an error if the authority section is not either empty or "localhost".<br><br>However, if SQLite is compiled with the SQLITE_ALLOW_URI_AUTHORITY compile-time option, then the URI is converted into a Uniform Naming Convention (UNC) filename and passed down to the underlying operating system that way |
|
||||
| App Armor | sqlite_app_armor | When defined, this C-preprocessor macro activates extra code that attempts to detect misuse of the SQLite API, such as passing in NULL pointers to required parameters or using objects after they have been destroyed. <br><br>App Armor is not available under `Windows`. |
|
||||
| Disable Load Extensions | sqlite_omit_load_extension | Loading of external extensions is enabled by default.<br><br>To disable extension loading add the build tag `sqlite_omit_load_extension`. |
|
||||
| Foreign Keys | sqlite_foreign_keys | This macro determines whether enforcement of foreign key constraints is enabled or disabled by default for new database connections.<br><br>Each database connection can always turn enforcement of foreign key constraints on and off and run-time using the foreign_keys pragma.<br><br>Enforcement of foreign key constraints is normally off by default, but if this compile-time parameter is set to 1, enforcement of foreign key constraints will be on by default |
|
||||
| Full Auto Vacuum | sqlite_vacuum_full | Set the default auto vacuum to full |
|
||||
| Incremental Auto Vacuum | sqlite_vacuum_incr | Set the default auto vacuum to incremental |
|
||||
| Full Text Search Engine | sqlite_fts5 | When this option is defined in the amalgamation, versions 5 of the full-text search engine (fts5) is added to the build automatically |
|
||||
| International Components for Unicode | sqlite_icu | This option causes the International Components for Unicode or "ICU" extension to SQLite to be added to the build |
|
||||
| Introspect PRAGMAS | sqlite_introspect | This option adds some extra PRAGMA statements. <ul><li>PRAGMA function_list</li><li>PRAGMA module_list</li><li>PRAGMA pragma_list</li></ul> |
|
||||
| JSON SQL Functions | sqlite_json | When this option is defined in the amalgamation, the JSON SQL functions are added to the build automatically |
|
||||
| Pre Update Hook | sqlite_preupdate_hook | Registers a callback function that is invoked prior to each INSERT, UPDATE, and DELETE operation on a database table. |
|
||||
| Secure Delete | sqlite_secure_delete | This compile-time option changes the default setting of the secure_delete pragma.<br><br>When this option is not used, secure_delete defaults to off. When this option is present, secure_delete defaults to on.<br><br>The secure_delete setting causes deleted content to be overwritten with zeros. There is a small performance penalty since additional I/O must occur.<br><br>On the other hand, secure_delete can prevent fragments of sensitive information from lingering in unused parts of the database file after it has been deleted. See the documentation on the secure_delete pragma for additional information |
|
||||
| Secure Delete (FAST) | sqlite_secure_delete_fast | For more information see [PRAGMA secure_delete](https://www.sqlite.org/pragma.html#pragma_secure_delete) |
|
||||
| Tracing / Debug | sqlite_trace | Activate trace functions |
|
||||
| User Authentication | sqlite_userauth | SQLite User Authentication see [User Authentication](#user-authentication) for more information. |
|
||||
|
||||
# Compilation
|
||||
|
||||
This package requires the `CGO_ENABLED=1` environment variable (if it is not already set by default) and the presence of the `gcc` compiler.
|
||||
|
||||
If you need to add additional CFLAGS or LDFLAGS to the build command and do not want to modify this package, this can be achieved by using the `CGO_CFLAGS` and `CGO_LDFLAGS` environment variables.
|
||||
|
||||
## Android
|
||||
|
||||
This package can be compiled for android.
|
||||
Compile with:
|
||||
|
||||
```bash
|
||||
go build --tags "android"
|
||||
```
|
||||
|
||||
For more information see [#201](https://github.com/mattn/go-sqlite3/issues/201)
|
||||
|
||||
# ARM
|
||||
|
||||
To compile for `ARM` use the following environment.
|
||||
|
||||
```bash
|
||||
env CC=arm-linux-gnueabihf-gcc CXX=arm-linux-gnueabihf-g++ \
|
||||
CGO_ENABLED=1 GOOS=linux GOARCH=arm GOARM=7 \
|
||||
go build -v
|
||||
```
|
||||
|
||||
Additional information:
|
||||
- [#242](https://github.com/mattn/go-sqlite3/issues/242)
|
||||
- [#504](https://github.com/mattn/go-sqlite3/issues/504)
|
||||
|
||||
# Cross Compile
|
||||
|
||||
This library can be cross-compiled.
|
||||
|
||||
In some cases you are required to set the `CC` environment variable to the cross compiler.
|
||||
|
||||
## Cross Compiling from MAC OSX
|
||||
The simplest way to cross compile from OSX is to use [xgo](https://github.com/karalabe/xgo).
|
||||
|
||||
Steps:
|
||||
- Install [xgo](https://github.com/karalabe/xgo) (`go get github.com/karalabe/xgo`).
|
||||
- Ensure that your project is within your `GOPATH`.
|
||||
- Run `xgo local/path/to/project`.
|
||||
|
||||
Please refer to the project's [README](https://github.com/karalabe/xgo/blob/master/README.md) for further information.
|
||||
|
||||
# Google Cloud Platform
|
||||
|
||||
Building on GCP is not possible because Google Cloud Platform does not allow `gcc` to be executed.
|
||||
|
||||
Please work only with compiled final binaries.
|
||||
|
||||
## Linux
|
||||
|
||||
To compile this package on Linux you must install the development tools for your linux distribution.
|
||||
|
||||
To compile under linux use the build tag `linux`.
|
||||
|
||||
```bash
|
||||
go build --tags "linux"
|
||||
```
|
||||
|
||||
If you wish to link directly to libsqlite3 then you can use the `libsqlite3` build tag.
|
||||
|
||||
```
|
||||
go build --tags "libsqlite3 linux"
|
||||
```
|
||||
|
||||
### Alpine
|
||||
|
||||
When building in an `alpine` container run the following command before building.
|
||||
|
||||
```
|
||||
apk add --update gcc musl-dev
|
||||
```
|
||||
|
||||
### Fedora
|
||||
|
||||
```bash
|
||||
sudo yum groupinstall "Development Tools" "Development Libraries"
|
||||
```
|
||||
|
||||
### Ubuntu
|
||||
|
||||
```bash
|
||||
sudo apt-get install build-essential
|
||||
```
|
||||
|
||||
## Mac OSX
|
||||
|
||||
OSX should have all the tools present to compile this package; if not, install Xcode, which will add all the developer tools.
|
||||
|
||||
Required dependency
|
||||
|
||||
```bash
|
||||
brew install sqlite3
|
||||
```
|
||||
|
||||
For OSX there is an additional package install which is required if you wish to build the `icu` extension.
|
||||
|
||||
This additional package can be installed with `homebrew`.
|
||||
|
||||
```bash
|
||||
brew upgrade icu4c
|
||||
```
|
||||
|
||||
To compile for Mac OSX.
|
||||
|
||||
```bash
|
||||
go build --tags "darwin"
|
||||
```
|
||||
|
||||
If you wish to link directly to libsqlite3 then you can use the `libsqlite3` build tag.
|
||||
|
||||
```
|
||||
go build --tags "libsqlite3 darwin"
|
||||
```
|
||||
|
||||
Additional information:
|
||||
- [#206](https://github.com/mattn/go-sqlite3/issues/206)
|
||||
- [#404](https://github.com/mattn/go-sqlite3/issues/404)
|
||||
|
||||
## Windows
|
||||
|
||||
To compile this package on Windows OS you must have the `gcc` compiler installed.
|
||||
|
||||
1) Install a Windows `gcc` toolchain.
|
||||
2) Add the `bin` folders to the Windows path if the installer did not do this by default.
|
||||
3) Open a terminal for the TDM-GCC toolchain, which can be found in the Windows Start menu.
|
||||
4) Navigate to your project folder and run the `go build ...` command for this package.
|
||||
|
||||
For example the TDM-GCC Toolchain can be found [here](https://sourceforge.net/projects/tdm-gcc/).
|
||||
|
||||
## Errors
|
||||
|
||||
- Compile error: `can not be used when making a shared object; recompile with -fPIC`
|
||||
|
||||
When receiving a compile-time error referencing recompile with `-fPIC`, you
are probably using a hardened system.

You can compile the library on a hardened system with the following command.
|
||||
|
||||
```bash
|
||||
go build -ldflags '-extldflags=-fno-PIC'
|
||||
```
|
||||
|
||||
More details see [#120](https://github.com/mattn/go-sqlite3/issues/120)
|
||||
|
||||
- Can't build go-sqlite3 on windows 64bit.
|
||||
|
||||
> Probably, you are using go 1.0, go1.0 has a problem when it comes to compiling/linking on windows 64bit.
|
||||
> See: [#27](https://github.com/mattn/go-sqlite3/issues/27)
|
||||
|
||||
- `go get github.com/mattn/go-sqlite3` throws compilation error.
|
||||
|
||||
`gcc` throws: `internal compiler error`
|
||||
|
||||
Remove the downloaded repository from your disk and try re-installing with:
|
||||
|
||||
```bash
|
||||
go install github.com/mattn/go-sqlite3
|
||||
```
|
||||
|
||||
# User Authentication
|
||||
|
||||
This package supports the SQLite User Authentication module.
|
||||
|
||||
## Compile
|
||||
|
||||
To use the User authentication module the package has to be compiled with the tag `sqlite_userauth`. See [Features](#features).
|
||||
|
||||
## Usage
|
||||
|
||||
### Create protected database
|
||||
|
||||
To create a database protected by user authentication, provide the `_auth` argument in the connection string.
This will enable user authentication within the database. This option, however, requires two additional arguments:
|
||||
|
||||
- `_auth_user`
|
||||
- `_auth_pass`
|
||||
|
||||
When `_auth` is present on the connection string user authentication will be enabled and the provided user will be created
|
||||
as an `admin` user. After initial creation, the parameter `_auth` has no effect anymore and can be omitted from the connection string.
|
||||
|
||||
Example connection string:
|
||||
|
||||
Create a user-authentication-protected database with user `admin` and password `admin`:

`file:test.s3db?_auth&_auth_user=admin&_auth_pass=admin`

Create a user-authentication-protected database with user `admin` and password `admin`, using `SHA1` for the password encoding:

`file:test.s3db?_auth&_auth_user=admin&_auth_pass=admin&_auth_crypt=sha1`
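
A minimal sketch of opening the protected database from Go, assuming the package was built with the `sqlite_userauth` tag as described above; the file name and credentials are the ones from the example connection strings and are illustrative only.

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3" // build with: go build --tags "sqlite_userauth"
)

func main() {
	// Creates the database with user authentication enabled (first run) or
	// authenticates as admin/admin on subsequent runs.
	db, err := sql.Open("sqlite3", "file:test.s3db?_auth&_auth_user=admin&_auth_pass=admin")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Any statement forces the connection; wrong credentials fail with SQLITE_AUTH.
	var n int
	if err := db.QueryRow("SELECT count(*) FROM sqlite_master").Scan(&n); err != nil {
		log.Fatal(err)
	}
	log.Println("authenticated; schema objects:", n)
}
```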
|
||||
|
||||
### Password Encoding
|
||||
|
||||
The passwords within the user authentication module of SQLite are encoded with the SQLite function `sqlite_crypt`.
This function uses a Caesar cipher, which is quite insecure.
This library provides several additional password encoders which can be configured through the connection string.

The password cipher can be configured with the key `_auth_crypt`. If the configured password encoder also requires a
salt, this can be configured with `_auth_salt`.
|
||||
|
||||
#### Available Encoders
|
||||
|
||||
- SHA1
|
||||
- SSHA1 (Salted SHA1)
|
||||
- SHA256
|
||||
- SSHA256 (salted SHA256)
|
||||
- SHA384
|
||||
- SSHA384 (salted SHA384)
|
||||
- SHA512
|
||||
- SSHA512 (salted SHA512)
|
||||
|
||||
### Restrictions
|
||||
|
||||
Operations on the database regarding user management can only be performed by an administrator user.
|
||||
|
||||
### Support
|
||||
|
||||
The user authentication module supports two kinds of users:
|
||||
|
||||
- administrators
|
||||
- regular users
|
||||
|
||||
### User Management
|
||||
|
||||
User management can be done by directly using the `*SQLiteConn` or by SQL.
|
||||
|
||||
#### SQL
|
||||
|
||||
The following SQL functions are available for user management.
|
||||
|
||||
| Function | Arguments | Description |
|
||||
|----------|-----------|-------------|
|
||||
| `authenticate` | username `string`, password `string` | Authenticates a user. This is done by the connection and should not be used manually. |
| `auth_user_add` | username `string`, password `string`, admin `int` | Adds a user to the database. If the database is not protected by user authentication, it will enable it. The `admin` argument is an integer identifying whether the added user should be an administrator. Only administrators can add administrators. |
| `auth_user_change` | username `string`, password `string`, admin `int` | Modifies a user. Users can change their own password, but only an administrator can change the administrator flag. |
| `auth_user_delete` | username `string` | Deletes a user from the database. Can only be used by an administrator. The currently logged-in administrator cannot be deleted. This is to make sure there is always an administrator remaining. |
|
||||
|
||||
These functions will return an integer.
|
||||
|
||||
- 0 (SQLITE_OK)
|
||||
- 23 (SQLITE_AUTH) Failed to perform due to authentication or insufficient privileges
|
||||
|
||||
##### Examples
|
||||
|
||||
```sql
|
||||
// Authenticate user
SELECT authenticate('user', 'password');

// Create admin user
SELECT auth_user_add('admin2', 'admin2', 1);

// Change password for user
SELECT auth_user_change('user', 'userpassword', 0);

// Delete user
SELECT auth_user_delete('user');
|
||||
```
|
||||
|
||||
#### *SQLiteConn
|
||||
|
||||
The following functions are available for User authentication from the `*SQLiteConn`.
|
||||
|
||||
| Function | Description |
|
||||
|----------|-------------|
|
||||
| `Authenticate(username, password string) error` | Authenticate user |
|
||||
| `AuthUserAdd(username, password string, admin bool) error` | Add user |
|
||||
| `AuthUserChange(username, password string, admin bool) error` | Modify user |
|
||||
| `AuthUserDelete(username string) error` | Delete user |
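
The `*SQLiteConn` is usually reached through a `ConnectHook` on a registered driver. The sketch below shows that pattern under the same `sqlite_userauth` build assumption; the driver name `sqlite3_with_auth` and the credentials are made up for illustration.

```go
package main

import (
	"database/sql"
	"log"

	sqlite3 "github.com/mattn/go-sqlite3"
)

func main() {
	// Wrap the driver so every new connection hands us the *SQLiteConn.
	sql.Register("sqlite3_with_auth", &sqlite3.SQLiteDriver{
		ConnectHook: func(conn *sqlite3.SQLiteConn) error {
			// Add a regular (non-admin) user using the method from the table above.
			return conn.AuthUserAdd("user", "userpassword", false)
		},
	})

	db, err := sql.Open("sqlite3_with_auth", "file:test.s3db?_auth&_auth_user=admin&_auth_pass=admin")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := db.Ping(); err != nil { // triggers the ConnectHook
		log.Fatal(err)
	}
}
```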
|
||||
|
||||
### Attached database
|
||||
|
||||
When using attached databases, SQLite will use the authentication from the `main` database for the attached database(s).
|
||||
|
||||
# Extensions
|
||||
|
||||
If you want your own extension to be listed here or you want to add a reference to an extension; please submit an Issue for this.
|
||||
|
||||
## Spatialite
|
||||
|
||||
Spatialite is available as an extension to SQLite, and can be used in combination with this repository.
|
||||
For an example see [shaxbee/go-spatialite](https://github.com/shaxbee/go-spatialite).
|
||||
|
||||
## extension-functions.c from SQLite3 Contrib
|
||||
|
||||
extension-functions.c is available as an extension to SQLite, and provides the following functions:
|
||||
|
||||
- Math: acos, asin, atan, atn2, atan2, acosh, asinh, atanh, difference, degrees, radians, cos, sin, tan, cot, cosh, sinh, tanh, coth, exp, log, log10, power, sign, sqrt, square, ceil, floor, pi.
|
||||
- String: replicate, charindex, leftstr, rightstr, ltrim, rtrim, trim, replace, reverse, proper, padl, padr, padc, strfilter.
|
||||
- Aggregate: stdev, variance, mode, median, lower_quartile, upper_quartile
|
||||
|
||||
For an example see [dinedal/go-sqlite3-extension-functions](https://github.com/dinedal/go-sqlite3-extension-functions).
|
||||
|
||||
# FAQ
|
||||
|
||||
- Getting insert error while query is opened.
|
||||
|
||||
> You can pass some arguments into the connection string, for example, a URI.
|
||||
> See: [#39](https://github.com/mattn/go-sqlite3/issues/39)
|
||||
|
||||
- Do you want to cross compile? mingw on Linux or Mac?
|
||||
|
||||
> See: [#106](https://github.com/mattn/go-sqlite3/issues/106)
|
||||
> See also: http://www.limitlessfx.com/cross-compile-golang-app-for-windows-from-linux.html
|
||||
|
||||
- Want to get time.Time with current locale
|
||||
|
||||
Use `_loc=auto` in SQLite3 filename schema like `file:foo.db?_loc=auto`.
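
A small, hedged sketch of the effect: the table `events` and its `created` column are assumed for illustration, and the database is expected to already contain at least one row.

```go
package main

import (
	"database/sql"
	"log"
	"time"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	db, err := sql.Open("sqlite3", "file:foo.db?_loc=auto")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// With _loc=auto, TIMESTAMP/DATETIME values scan into time.Time
	// using the current locale's time zone.
	var created time.Time
	if err := db.QueryRow("SELECT created FROM events LIMIT 1").Scan(&created); err != nil {
		log.Fatal(err)
	}
	log.Println(created, created.Location())
}
```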
|
||||
|
||||
- Can I use this in multiple routines concurrently?
|
||||
|
||||
Yes for read-only access, but no for writable access. See [#50](https://github.com/mattn/go-sqlite3/issues/50), [#51](https://github.com/mattn/go-sqlite3/issues/51), [#209](https://github.com/mattn/go-sqlite3/issues/209), [#274](https://github.com/mattn/go-sqlite3/issues/274).
|
||||
|
||||
- Why am I getting a `no such table` error?
|
||||
|
||||
Why is it racy if I use a `sql.Open("sqlite3", ":memory:")` database?
|
||||
|
||||
Each connection to `":memory:"` opens a brand new in-memory sql database, so if
|
||||
the stdlib's sql engine happens to open another connection and you've only
|
||||
specified `":memory:"`, that connection will see a brand new database. A
|
||||
workaround is to use `"file::memory:?cache=shared"` (or `"file:foobar?mode=memory&cache=shared"`). Every
|
||||
connection to this string will point to the same in-memory database.
|
||||
|
||||
Note that if the last database connection in the pool closes, the in-memory database is deleted. Make sure the [max idle connection limit](https://golang.org/pkg/database/sql/#DB.SetMaxIdleConns) is > 0, and the [connection lifetime](https://golang.org/pkg/database/sql/#DB.SetConnMaxLifetime) is infinite.
|
||||
|
||||
For more information see
|
||||
* [#204](https://github.com/mattn/go-sqlite3/issues/204)
|
||||
* [#511](https://github.com/mattn/go-sqlite3/issues/511)
|
||||
* https://www.sqlite.org/sharedcache.html#shared_cache_and_in_memory_databases
|
||||
* https://www.sqlite.org/inmemorydb.html#sharedmemdb
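
A minimal sketch of the workaround described above, combining the shared-cache in-memory DSN with the pool settings so the database is never dropped:

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	// Every connection to this DSN points at the same in-memory database.
	db, err := sql.Open("sqlite3", "file::memory:?cache=shared")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Keep at least one idle connection and never expire connections;
	// if the last connection closed, the in-memory database would be deleted.
	db.SetMaxIdleConns(1)
	db.SetConnMaxLifetime(0) // 0 = reuse connections forever

	if _, err := db.Exec("CREATE TABLE t (x INTEGER)"); err != nil {
		log.Fatal(err)
	}
	log.Println("shared in-memory database ready")
}
```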
|
||||
|
||||
- Reading from database with large amount of goroutines fails on OSX.
|
||||
|
||||
By default, OS X limits the number of simultaneously open files to 1000 system-wide.
|
||||
|
||||
For more information see [#289](https://github.com/mattn/go-sqlite3/issues/289)
|
||||
|
||||
- Trying to execute a `.` (dot) command throws an error.
|
||||
|
||||
Error: `Error: near ".": syntax error`
|
||||
Dot commands are part of the SQLite3 CLI, not of this library.
|
||||
|
||||
You need to implement the feature or call the sqlite3 cli.
|
||||
|
||||
More information see [#305](https://github.com/mattn/go-sqlite3/issues/305)
|
||||
|
||||
- Error: `database is locked`
|
||||
|
||||
When you get a `database is locked` error, please use the following options.
|
||||
|
||||
Add to DSN: `cache=shared`
|
||||
|
||||
Example:
|
||||
```go
|
||||
db, err := sql.Open("sqlite3", "file:locked.sqlite?cache=shared")
|
||||
```
|
||||
|
||||
Second, please set the maximum number of open connections of the `database/sql` package to 1.
|
||||
|
||||
```go
|
||||
db.SetMaxOpenConns(1)
|
||||
```
|
||||
|
||||
More information see [#209](https://github.com/mattn/go-sqlite3/issues/209)
|
||||
|
||||
## Contributors
|
||||
|
||||
### Code Contributors
|
||||
|
||||
This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)].
|
||||
<a href="https://github.com/mattn/go-sqlite3/graphs/contributors"><img src="https://opencollective.com/mattn-go-sqlite3/contributors.svg?width=890&button=false" /></a>
|
||||
|
||||
### Financial Contributors
|
||||
|
||||
Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/mattn-go-sqlite3/contribute)]
|
||||
|
||||
#### Individuals
|
||||
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3"><img src="https://opencollective.com/mattn-go-sqlite3/individuals.svg?width=890"></a>
|
||||
|
||||
#### Organizations
|
||||
|
||||
Support this project with your organization. Your logo will show up here with a link to your website. [[Contribute](https://opencollective.com/mattn-go-sqlite3/contribute)]
|
||||
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/0/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/0/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/1/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/1/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/2/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/2/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/3/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/3/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/4/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/4/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/5/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/5/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/6/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/6/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/7/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/7/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/8/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/8/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/9/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/9/avatar.svg"></a>
|
||||
|
||||
# License
|
||||
|
||||
MIT: http://mattn.mit-license.org/2018
|
||||
|
||||
sqlite3-binding.c, sqlite3-binding.h, sqlite3ext.h
|
||||
|
||||
The -binding suffix was added to avoid build failures under gccgo.
|
||||
|
||||
In this repository, those files are an amalgamation of code that was copied from SQLite3. The license of that code is the same as the license of SQLite3.
|
||||
|
||||
# Author
|
||||
|
||||
Yasuhiro Matsumoto (a.k.a mattn)
|
||||
|
||||
G.J.R. Timmer
|
85
vendor/github.com/mattn/go-sqlite3/backup.go
generated
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include <sqlite3-binding.h>
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"runtime"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// SQLiteBackup implements the backup interface.
|
||||
type SQLiteBackup struct {
|
||||
b *C.sqlite3_backup
|
||||
}
|
||||
|
||||
// Backup makes a backup from src to dest.
|
||||
func (destConn *SQLiteConn) Backup(dest string, srcConn *SQLiteConn, src string) (*SQLiteBackup, error) {
|
||||
destptr := C.CString(dest)
|
||||
defer C.free(unsafe.Pointer(destptr))
|
||||
srcptr := C.CString(src)
|
||||
defer C.free(unsafe.Pointer(srcptr))
|
||||
|
||||
if b := C.sqlite3_backup_init(destConn.db, destptr, srcConn.db, srcptr); b != nil {
|
||||
bb := &SQLiteBackup{b: b}
|
||||
runtime.SetFinalizer(bb, (*SQLiteBackup).Finish)
|
||||
return bb, nil
|
||||
}
|
||||
return nil, destConn.lastError()
|
||||
}
|
||||
|
||||
// Step backs up the database for one step, calling the underlying `sqlite3_backup_step`
// function. It returns a boolean indicating whether the backup is done
// and an error signalling any other error. Done is returned if the underlying
// C function returns SQLITE_DONE (code 101).
|
||||
func (b *SQLiteBackup) Step(p int) (bool, error) {
|
||||
ret := C.sqlite3_backup_step(b.b, C.int(p))
|
||||
if ret == C.SQLITE_DONE {
|
||||
return true, nil
|
||||
} else if ret != 0 && ret != C.SQLITE_LOCKED && ret != C.SQLITE_BUSY {
|
||||
return false, Error{Code: ErrNo(ret)}
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// Remaining returns the number of pages still to be backed up.
|
||||
func (b *SQLiteBackup) Remaining() int {
|
||||
return int(C.sqlite3_backup_remaining(b.b))
|
||||
}
|
||||
|
||||
// PageCount returns the total number of pages in the source database.
|
||||
func (b *SQLiteBackup) PageCount() int {
|
||||
return int(C.sqlite3_backup_pagecount(b.b))
|
||||
}
|
||||
|
||||
// Finish closes the backup.
|
||||
func (b *SQLiteBackup) Finish() error {
|
||||
return b.Close()
|
||||
}
|
||||
|
||||
// Close closes the backup.
|
||||
func (b *SQLiteBackup) Close() error {
|
||||
ret := C.sqlite3_backup_finish(b.b)
|
||||
|
||||
// sqlite3_backup_finish() never fails, it just returns the
|
||||
// error code from previous operations, so clean up before
|
||||
// checking and returning an error
|
||||
b.b = nil
|
||||
runtime.SetFinalizer(b, nil)
|
||||
|
||||
if ret != 0 {
|
||||
return Error{Code: ErrNo(ret)}
|
||||
}
|
||||
return nil
|
||||
}
|
392
vendor/github.com/mattn/go-sqlite3/callback.go
generated
vendored
Normal file
@@ -0,0 +1,392 @@
|
||||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package sqlite3
|
||||
|
||||
// You can't export a Go function to C and have definitions in the C
|
||||
// preamble in the same file, so we have to have callbackTrampoline in
|
||||
// its own file. Because we need a separate file anyway, the support
|
||||
// code for SQLite custom functions is in here.
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include <sqlite3-binding.h>
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
|
||||
void _sqlite3_result_text(sqlite3_context* ctx, const char* s);
|
||||
void _sqlite3_result_blob(sqlite3_context* ctx, const void* b, int l);
|
||||
*/
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"sync"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
//export callbackTrampoline
|
||||
func callbackTrampoline(ctx *C.sqlite3_context, argc int, argv **C.sqlite3_value) {
|
||||
args := (*[(math.MaxInt32 - 1) / unsafe.Sizeof((*C.sqlite3_value)(nil))]*C.sqlite3_value)(unsafe.Pointer(argv))[:argc:argc]
|
||||
fi := lookupHandle(C.sqlite3_user_data(ctx)).(*functionInfo)
|
||||
fi.Call(ctx, args)
|
||||
}
|
||||
|
||||
//export stepTrampoline
|
||||
func stepTrampoline(ctx *C.sqlite3_context, argc C.int, argv **C.sqlite3_value) {
|
||||
args := (*[(math.MaxInt32 - 1) / unsafe.Sizeof((*C.sqlite3_value)(nil))]*C.sqlite3_value)(unsafe.Pointer(argv))[:int(argc):int(argc)]
|
||||
ai := lookupHandle(C.sqlite3_user_data(ctx)).(*aggInfo)
|
||||
ai.Step(ctx, args)
|
||||
}
|
||||
|
||||
//export doneTrampoline
|
||||
func doneTrampoline(ctx *C.sqlite3_context) {
|
||||
ai := lookupHandle(C.sqlite3_user_data(ctx)).(*aggInfo)
|
||||
ai.Done(ctx)
|
||||
}
|
||||
|
||||
//export compareTrampoline
|
||||
func compareTrampoline(handlePtr unsafe.Pointer, la C.int, a *C.char, lb C.int, b *C.char) C.int {
|
||||
cmp := lookupHandle(handlePtr).(func(string, string) int)
|
||||
return C.int(cmp(C.GoStringN(a, la), C.GoStringN(b, lb)))
|
||||
}
|
||||
|
||||
//export commitHookTrampoline
|
||||
func commitHookTrampoline(handle unsafe.Pointer) int {
|
||||
callback := lookupHandle(handle).(func() int)
|
||||
return callback()
|
||||
}
|
||||
|
||||
//export rollbackHookTrampoline
|
||||
func rollbackHookTrampoline(handle unsafe.Pointer) {
|
||||
callback := lookupHandle(handle).(func())
|
||||
callback()
|
||||
}
|
||||
|
||||
//export updateHookTrampoline
|
||||
func updateHookTrampoline(handle unsafe.Pointer, op int, db *C.char, table *C.char, rowid int64) {
|
||||
callback := lookupHandle(handle).(func(int, string, string, int64))
|
||||
callback(op, C.GoString(db), C.GoString(table), rowid)
|
||||
}
|
||||
|
||||
//export authorizerTrampoline
|
||||
func authorizerTrampoline(handle unsafe.Pointer, op int, arg1 *C.char, arg2 *C.char, arg3 *C.char) int {
|
||||
callback := lookupHandle(handle).(func(int, string, string, string) int)
|
||||
return callback(op, C.GoString(arg1), C.GoString(arg2), C.GoString(arg3))
|
||||
}
|
||||
|
||||
//export preUpdateHookTrampoline
|
||||
func preUpdateHookTrampoline(handle unsafe.Pointer, dbHandle uintptr, op int, db *C.char, table *C.char, oldrowid int64, newrowid int64) {
|
||||
hval := lookupHandleVal(handle)
|
||||
data := SQLitePreUpdateData{
|
||||
Conn: hval.db,
|
||||
Op: op,
|
||||
DatabaseName: C.GoString(db),
|
||||
TableName: C.GoString(table),
|
||||
OldRowID: oldrowid,
|
||||
NewRowID: newrowid,
|
||||
}
|
||||
callback := hval.val.(func(SQLitePreUpdateData))
|
||||
callback(data)
|
||||
}
|
||||
|
||||
// Use handles to avoid passing Go pointers to C.
|
||||
type handleVal struct {
|
||||
db *SQLiteConn
|
||||
val interface{}
|
||||
}
|
||||
|
||||
var handleLock sync.Mutex
|
||||
var handleVals = make(map[unsafe.Pointer]handleVal)
|
||||
|
||||
func newHandle(db *SQLiteConn, v interface{}) unsafe.Pointer {
|
||||
handleLock.Lock()
|
||||
defer handleLock.Unlock()
|
||||
val := handleVal{db: db, val: v}
|
||||
var p unsafe.Pointer = C.malloc(C.size_t(1))
|
||||
if p == nil {
|
||||
panic("can't allocate 'cgo-pointer hack index pointer': ptr == nil")
|
||||
}
|
||||
handleVals[p] = val
|
||||
return p
|
||||
}
|
||||
|
||||
func lookupHandleVal(handle unsafe.Pointer) handleVal {
|
||||
handleLock.Lock()
|
||||
defer handleLock.Unlock()
|
||||
return handleVals[handle]
|
||||
}
|
||||
|
||||
func lookupHandle(handle unsafe.Pointer) interface{} {
|
||||
return lookupHandleVal(handle).val
|
||||
}
|
||||
|
||||
func deleteHandles(db *SQLiteConn) {
|
||||
handleLock.Lock()
|
||||
defer handleLock.Unlock()
|
||||
for handle, val := range handleVals {
|
||||
if val.db == db {
|
||||
delete(handleVals, handle)
|
||||
C.free(handle)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This is only here so that tests can refer to it.
|
||||
type callbackArgRaw C.sqlite3_value
|
||||
|
||||
type callbackArgConverter func(*C.sqlite3_value) (reflect.Value, error)
|
||||
|
||||
type callbackArgCast struct {
|
||||
f callbackArgConverter
|
||||
typ reflect.Type
|
||||
}
|
||||
|
||||
func (c callbackArgCast) Run(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
val, err := c.f(v)
|
||||
if err != nil {
|
||||
return reflect.Value{}, err
|
||||
}
|
||||
if !val.Type().ConvertibleTo(c.typ) {
|
||||
return reflect.Value{}, fmt.Errorf("cannot convert %s to %s", val.Type(), c.typ)
|
||||
}
|
||||
return val.Convert(c.typ), nil
|
||||
}
|
||||
|
||||
func callbackArgInt64(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
if C.sqlite3_value_type(v) != C.SQLITE_INTEGER {
|
||||
return reflect.Value{}, fmt.Errorf("argument must be an INTEGER")
|
||||
}
|
||||
return reflect.ValueOf(int64(C.sqlite3_value_int64(v))), nil
|
||||
}
|
||||
|
||||
func callbackArgBool(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
if C.sqlite3_value_type(v) != C.SQLITE_INTEGER {
|
||||
return reflect.Value{}, fmt.Errorf("argument must be an INTEGER")
|
||||
}
|
||||
i := int64(C.sqlite3_value_int64(v))
|
||||
val := false
|
||||
if i != 0 {
|
||||
val = true
|
||||
}
|
||||
return reflect.ValueOf(val), nil
|
||||
}
|
||||
|
||||
func callbackArgFloat64(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
if C.sqlite3_value_type(v) != C.SQLITE_FLOAT {
|
||||
return reflect.Value{}, fmt.Errorf("argument must be a FLOAT")
|
||||
}
|
||||
return reflect.ValueOf(float64(C.sqlite3_value_double(v))), nil
|
||||
}
|
||||
|
||||
func callbackArgBytes(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
switch C.sqlite3_value_type(v) {
|
||||
case C.SQLITE_BLOB:
|
||||
l := C.sqlite3_value_bytes(v)
|
||||
p := C.sqlite3_value_blob(v)
|
||||
return reflect.ValueOf(C.GoBytes(p, l)), nil
|
||||
case C.SQLITE_TEXT:
|
||||
l := C.sqlite3_value_bytes(v)
|
||||
c := unsafe.Pointer(C.sqlite3_value_text(v))
|
||||
return reflect.ValueOf(C.GoBytes(c, l)), nil
|
||||
default:
|
||||
return reflect.Value{}, fmt.Errorf("argument must be BLOB or TEXT")
|
||||
}
|
||||
}
|
||||
|
||||
func callbackArgString(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
switch C.sqlite3_value_type(v) {
|
||||
case C.SQLITE_BLOB:
|
||||
l := C.sqlite3_value_bytes(v)
|
||||
p := (*C.char)(C.sqlite3_value_blob(v))
|
||||
return reflect.ValueOf(C.GoStringN(p, l)), nil
|
||||
case C.SQLITE_TEXT:
|
||||
c := (*C.char)(unsafe.Pointer(C.sqlite3_value_text(v)))
|
||||
return reflect.ValueOf(C.GoString(c)), nil
|
||||
default:
|
||||
return reflect.Value{}, fmt.Errorf("argument must be BLOB or TEXT")
|
||||
}
|
||||
}
|
||||
|
||||
func callbackArgGeneric(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
switch C.sqlite3_value_type(v) {
|
||||
case C.SQLITE_INTEGER:
|
||||
return callbackArgInt64(v)
|
||||
case C.SQLITE_FLOAT:
|
||||
return callbackArgFloat64(v)
|
||||
case C.SQLITE_TEXT:
|
||||
return callbackArgString(v)
|
||||
case C.SQLITE_BLOB:
|
||||
return callbackArgBytes(v)
|
||||
case C.SQLITE_NULL:
|
||||
// Interpret NULL as a nil byte slice.
|
||||
var ret []byte
|
||||
return reflect.ValueOf(ret), nil
|
||||
default:
|
||||
panic("unreachable")
|
||||
}
|
||||
}
|
||||
|
||||
func callbackArg(typ reflect.Type) (callbackArgConverter, error) {
|
||||
switch typ.Kind() {
|
||||
case reflect.Interface:
|
||||
if typ.NumMethod() != 0 {
|
||||
return nil, errors.New("the only supported interface type is interface{}")
|
||||
}
|
||||
return callbackArgGeneric, nil
|
||||
case reflect.Slice:
|
||||
if typ.Elem().Kind() != reflect.Uint8 {
|
||||
return nil, errors.New("the only supported slice type is []byte")
|
||||
}
|
||||
return callbackArgBytes, nil
|
||||
case reflect.String:
|
||||
return callbackArgString, nil
|
||||
case reflect.Bool:
|
||||
return callbackArgBool, nil
|
||||
case reflect.Int64:
|
||||
return callbackArgInt64, nil
|
||||
case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Int, reflect.Uint:
|
||||
c := callbackArgCast{callbackArgInt64, typ}
|
||||
return c.Run, nil
|
||||
case reflect.Float64:
|
||||
return callbackArgFloat64, nil
|
||||
case reflect.Float32:
|
||||
c := callbackArgCast{callbackArgFloat64, typ}
|
||||
return c.Run, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("don't know how to convert to %s", typ)
|
||||
}
|
||||
}
|
||||
|
||||
func callbackConvertArgs(argv []*C.sqlite3_value, converters []callbackArgConverter, variadic callbackArgConverter) ([]reflect.Value, error) {
|
||||
var args []reflect.Value
|
||||
|
||||
if len(argv) < len(converters) {
|
||||
return nil, fmt.Errorf("function requires at least %d arguments", len(converters))
|
||||
}
|
||||
|
||||
for i, arg := range argv[:len(converters)] {
|
||||
v, err := converters[i](arg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if variadic != nil {
|
||||
for _, arg := range argv[len(converters):] {
|
||||
v, err := variadic(arg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
args = append(args, v)
|
||||
}
|
||||
}
|
||||
return args, nil
|
||||
}
|
||||
|
||||
type callbackRetConverter func(*C.sqlite3_context, reflect.Value) error
|
||||
|
||||
func callbackRetInteger(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int64:
|
||||
case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Int, reflect.Uint:
|
||||
v = v.Convert(reflect.TypeOf(int64(0)))
|
||||
case reflect.Bool:
|
||||
b := v.Interface().(bool)
|
||||
if b {
|
||||
v = reflect.ValueOf(int64(1))
|
||||
} else {
|
||||
v = reflect.ValueOf(int64(0))
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("cannot convert %s to INTEGER", v.Type())
|
||||
}
|
||||
|
||||
C.sqlite3_result_int64(ctx, C.sqlite3_int64(v.Interface().(int64)))
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRetFloat(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Float64:
|
||||
case reflect.Float32:
|
||||
v = v.Convert(reflect.TypeOf(float64(0)))
|
||||
default:
|
||||
return fmt.Errorf("cannot convert %s to FLOAT", v.Type())
|
||||
}
|
||||
|
||||
C.sqlite3_result_double(ctx, C.double(v.Interface().(float64)))
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRetBlob(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
if v.Type().Kind() != reflect.Slice || v.Type().Elem().Kind() != reflect.Uint8 {
|
||||
return fmt.Errorf("cannot convert %s to BLOB", v.Type())
|
||||
}
|
||||
i := v.Interface()
|
||||
if i == nil || len(i.([]byte)) == 0 {
|
||||
C.sqlite3_result_null(ctx)
|
||||
} else {
|
||||
bs := i.([]byte)
|
||||
C._sqlite3_result_blob(ctx, unsafe.Pointer(&bs[0]), C.int(len(bs)))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRetText(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
if v.Type().Kind() != reflect.String {
|
||||
return fmt.Errorf("cannot convert %s to TEXT", v.Type())
|
||||
}
|
||||
C._sqlite3_result_text(ctx, C.CString(v.Interface().(string)))
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRetNil(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRet(typ reflect.Type) (callbackRetConverter, error) {
|
||||
switch typ.Kind() {
|
||||
case reflect.Interface:
|
||||
errorInterface := reflect.TypeOf((*error)(nil)).Elem()
|
||||
if typ.Implements(errorInterface) {
|
||||
return callbackRetNil, nil
|
||||
}
|
||||
fallthrough
|
||||
case reflect.Slice:
|
||||
if typ.Elem().Kind() != reflect.Uint8 {
|
||||
return nil, errors.New("the only supported slice type is []byte")
|
||||
}
|
||||
return callbackRetBlob, nil
|
||||
case reflect.String:
|
||||
return callbackRetText, nil
|
||||
case reflect.Bool, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Int, reflect.Uint:
|
||||
return callbackRetInteger, nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return callbackRetFloat, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("don't know how to convert to %s", typ)
|
||||
}
|
||||
}
|
||||
|
||||
func callbackError(ctx *C.sqlite3_context, err error) {
|
||||
cstr := C.CString(err.Error())
|
||||
defer C.free(unsafe.Pointer(cstr))
|
||||
C.sqlite3_result_error(ctx, cstr, C.int(-1))
|
||||
}
|
||||
|
||||
// Test support code. Tests are not allowed to import "C", so we can't
|
||||
// declare any functions that use C.sqlite3_value.
|
||||
func callbackSyntheticForTests(v reflect.Value, err error) callbackArgConverter {
|
||||
return func(*C.sqlite3_value) (reflect.Value, error) {
|
||||
return v, err
|
||||
}
|
||||
}
|
299
vendor/github.com/mattn/go-sqlite3/convert.go
generated
vendored
Normal file
@@ -0,0 +1,299 @@
// Extracted from Go database/sql source code

// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Type conversions for Scan.

package sqlite3

import (
    "database/sql"
    "database/sql/driver"
    "errors"
    "fmt"
    "reflect"
    "strconv"
    "time"
)

var errNilPtr = errors.New("destination pointer is nil") // embedded in descriptive error

// convertAssign copies to dest the value in src, converting it if possible.
// An error is returned if the copy would result in loss of information.
// dest should be a pointer type.
func convertAssign(dest, src interface{}) error {
    // Common cases, without reflect.
    switch s := src.(type) {
    case string:
        switch d := dest.(type) {
        case *string:
            if d == nil {
                return errNilPtr
            }
            *d = s
            return nil
        case *[]byte:
            if d == nil {
                return errNilPtr
            }
            *d = []byte(s)
            return nil
        case *sql.RawBytes:
            if d == nil {
                return errNilPtr
            }
            *d = append((*d)[:0], s...)
            return nil
        }
    case []byte:
        switch d := dest.(type) {
        case *string:
            if d == nil {
                return errNilPtr
            }
            *d = string(s)
            return nil
        case *interface{}:
            if d == nil {
                return errNilPtr
            }
            *d = cloneBytes(s)
            return nil
        case *[]byte:
            if d == nil {
                return errNilPtr
            }
            *d = cloneBytes(s)
            return nil
        case *sql.RawBytes:
            if d == nil {
                return errNilPtr
            }
            *d = s
            return nil
        }
    case time.Time:
        switch d := dest.(type) {
        case *time.Time:
            *d = s
            return nil
        case *string:
            *d = s.Format(time.RFC3339Nano)
            return nil
        case *[]byte:
            if d == nil {
                return errNilPtr
            }
            *d = []byte(s.Format(time.RFC3339Nano))
            return nil
        case *sql.RawBytes:
            if d == nil {
                return errNilPtr
            }
            *d = s.AppendFormat((*d)[:0], time.RFC3339Nano)
            return nil
        }
    case nil:
        switch d := dest.(type) {
        case *interface{}:
            if d == nil {
                return errNilPtr
            }
            *d = nil
            return nil
        case *[]byte:
            if d == nil {
                return errNilPtr
            }
            *d = nil
            return nil
        case *sql.RawBytes:
            if d == nil {
                return errNilPtr
            }
            *d = nil
            return nil
        }
    }

    var sv reflect.Value

    switch d := dest.(type) {
    case *string:
        sv = reflect.ValueOf(src)
        switch sv.Kind() {
        case reflect.Bool,
            reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
            reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
            reflect.Float32, reflect.Float64:
            *d = asString(src)
            return nil
        }
    case *[]byte:
        sv = reflect.ValueOf(src)
        if b, ok := asBytes(nil, sv); ok {
            *d = b
            return nil
        }
    case *sql.RawBytes:
        sv = reflect.ValueOf(src)
        if b, ok := asBytes([]byte(*d)[:0], sv); ok {
            *d = sql.RawBytes(b)
            return nil
        }
    case *bool:
        bv, err := driver.Bool.ConvertValue(src)
        if err == nil {
            *d = bv.(bool)
        }
        return err
    case *interface{}:
        *d = src
        return nil
    }

    if scanner, ok := dest.(sql.Scanner); ok {
        return scanner.Scan(src)
    }

    dpv := reflect.ValueOf(dest)
    if dpv.Kind() != reflect.Ptr {
        return errors.New("destination not a pointer")
    }
    if dpv.IsNil() {
        return errNilPtr
    }

    if !sv.IsValid() {
        sv = reflect.ValueOf(src)
    }

    dv := reflect.Indirect(dpv)
    if sv.IsValid() && sv.Type().AssignableTo(dv.Type()) {
        switch b := src.(type) {
        case []byte:
            dv.Set(reflect.ValueOf(cloneBytes(b)))
        default:
            dv.Set(sv)
        }
        return nil
    }

    if dv.Kind() == sv.Kind() && sv.Type().ConvertibleTo(dv.Type()) {
        dv.Set(sv.Convert(dv.Type()))
        return nil
    }

    // The following conversions use a string value as an intermediate representation
    // to convert between various numeric types.
    //
    // This also allows scanning into user defined types such as "type Int int64".
    // For symmetry, also check for string destination types.
    switch dv.Kind() {
    case reflect.Ptr:
        if src == nil {
            dv.Set(reflect.Zero(dv.Type()))
            return nil
        }
        dv.Set(reflect.New(dv.Type().Elem()))
        return convertAssign(dv.Interface(), src)
    case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
        s := asString(src)
        i64, err := strconv.ParseInt(s, 10, dv.Type().Bits())
        if err != nil {
            err = strconvErr(err)
            return fmt.Errorf("converting driver.Value type %T (%q) to a %s: %v", src, s, dv.Kind(), err)
        }
        dv.SetInt(i64)
        return nil
    case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
        s := asString(src)
        u64, err := strconv.ParseUint(s, 10, dv.Type().Bits())
        if err != nil {
            err = strconvErr(err)
            return fmt.Errorf("converting driver.Value type %T (%q) to a %s: %v", src, s, dv.Kind(), err)
        }
        dv.SetUint(u64)
        return nil
    case reflect.Float32, reflect.Float64:
        s := asString(src)
        f64, err := strconv.ParseFloat(s, dv.Type().Bits())
        if err != nil {
            err = strconvErr(err)
            return fmt.Errorf("converting driver.Value type %T (%q) to a %s: %v", src, s, dv.Kind(), err)
        }
        dv.SetFloat(f64)
        return nil
    case reflect.String:
        switch v := src.(type) {
        case string:
            dv.SetString(v)
            return nil
        case []byte:
            dv.SetString(string(v))
            return nil
        }
    }

    return fmt.Errorf("unsupported Scan, storing driver.Value type %T into type %T", src, dest)
}

func strconvErr(err error) error {
    if ne, ok := err.(*strconv.NumError); ok {
        return ne.Err
    }
    return err
}

func cloneBytes(b []byte) []byte {
    if b == nil {
        return nil
    }
    c := make([]byte, len(b))
    copy(c, b)
    return c
}

func asString(src interface{}) string {
    switch v := src.(type) {
    case string:
        return v
    case []byte:
        return string(v)
    }
    rv := reflect.ValueOf(src)
    switch rv.Kind() {
    case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
        return strconv.FormatInt(rv.Int(), 10)
    case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
        return strconv.FormatUint(rv.Uint(), 10)
    case reflect.Float64:
        return strconv.FormatFloat(rv.Float(), 'g', -1, 64)
    case reflect.Float32:
        return strconv.FormatFloat(rv.Float(), 'g', -1, 32)
    case reflect.Bool:
        return strconv.FormatBool(rv.Bool())
    }
    return fmt.Sprintf("%v", src)
}

func asBytes(buf []byte, rv reflect.Value) (b []byte, ok bool) {
    switch rv.Kind() {
    case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
        return strconv.AppendInt(buf, rv.Int(), 10), true
    case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
        return strconv.AppendUint(buf, rv.Uint(), 10), true
    case reflect.Float32:
        return strconv.AppendFloat(buf, rv.Float(), 'g', -1, 32), true
    case reflect.Float64:
        return strconv.AppendFloat(buf, rv.Float(), 'g', -1, 64), true
    case reflect.Bool:
        return strconv.AppendBool(buf, rv.Bool()), true
    case reflect.String:
        s := rv.String()
        return append(buf, s...), true
    }
    return
}
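convert.go above is a vendored copy of the conversion logic from database/sql, and the same branches run on every Scan. A minimal sketch of how they surface to callers, assuming an in-memory database; the table, values, and the MyID type are illustrative only and not part of this change:

package main

import (
    "database/sql"
    "fmt"
    "log"

    _ "github.com/mattn/go-sqlite3"
)

// MyID is a user-defined integer type; the conversion reaches it through the
// sv.Type().ConvertibleTo(dv.Type()) branch because both kinds are Int64.
type MyID int64

func main() {
    db, err := sql.Open("sqlite3", ":memory:")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    if _, err := db.Exec(`CREATE TABLE t (id INTEGER)`); err != nil {
        log.Fatal(err)
    }
    if _, err := db.Exec(`INSERT INTO t VALUES (42)`); err != nil {
        log.Fatal(err)
    }

    var (
        id MyID    // named integer type: direct reflect conversion
        s  string  // integer into string: the asString branch
        f  float32 // integer into float32: the string-intermediate ParseFloat branch
    )
    if err := db.QueryRow(`SELECT id, id, id FROM t`).Scan(&id, &s, &f); err != nil {
        log.Fatal(err)
    }
    fmt.Println(id, s, f) // 42 42 42
}

When the string-intermediate parse fails, the "converting driver.Value type ..." error built above is what the caller sees from Scan.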
135
vendor/github.com/mattn/go-sqlite3/doc.go
generated
vendored
Normal file
@@ -0,0 +1,135 @@
/*
Package sqlite3 provides interface to SQLite3 databases.

This works as a driver for database/sql.

Installation

    go get github.com/mattn/go-sqlite3

Supported Types

Currently, go-sqlite3 supports the following data types.

    +------------------------------+
    |go        | sqlite3           |
    |----------|-------------------|
    |nil       | null              |
    |int       | integer           |
    |int64     | integer           |
    |float64   | float             |
    |bool      | integer           |
    |[]byte    | blob              |
    |string    | text              |
    |time.Time | timestamp/datetime|
    +------------------------------+

SQLite3 Extension

You can write your own extension module for sqlite3. For example, below is an
extension for a Regexp matcher operation.

    #include <pcre.h>
    #include <string.h>
    #include <stdio.h>
    #include <sqlite3ext.h>

    SQLITE_EXTENSION_INIT1
    static void regexp_func(sqlite3_context *context, int argc, sqlite3_value **argv) {
      if (argc >= 2) {
        const char *target  = (const char *)sqlite3_value_text(argv[1]);
        const char *pattern = (const char *)sqlite3_value_text(argv[0]);
        const char* errstr = NULL;
        int erroff = 0;
        int vec[500];
        int n, rc;
        pcre* re = pcre_compile(pattern, 0, &errstr, &erroff, NULL);
        rc = pcre_exec(re, NULL, target, strlen(target), 0, 0, vec, 500);
        if (rc <= 0) {
          sqlite3_result_error(context, errstr, 0);
          return;
        }
        sqlite3_result_int(context, 1);
      }
    }

    #ifdef _WIN32
    __declspec(dllexport)
    #endif
    int sqlite3_extension_init(sqlite3 *db, char **errmsg,
          const sqlite3_api_routines *api) {
      SQLITE_EXTENSION_INIT2(api);
      return sqlite3_create_function(db, "regexp", 2, SQLITE_UTF8,
        (void*)db, regexp_func, NULL, NULL);
    }

It needs to be built as a so/dll shared library. And you need to register
the extension module like below.

    sql.Register("sqlite3_with_extensions",
        &sqlite3.SQLiteDriver{
            Extensions: []string{
                "sqlite3_mod_regexp",
            },
        })

Then, you can use this extension.

    rows, err := db.Query("select text from mytable where name regexp '^golang'")

Connection Hook

You can hook and inject your code when the connection is established by setting
ConnectHook to get the SQLiteConn.

    sql.Register("sqlite3_with_hook_example",
        &sqlite3.SQLiteDriver{
            ConnectHook: func(conn *sqlite3.SQLiteConn) error {
                sqlite3conn = append(sqlite3conn, conn)
                return nil
            },
        })

You can also use database/sql.Conn.Raw (Go >= 1.13):

    conn, err := db.Conn(context.Background())
    // if err != nil { ... }
    defer conn.Close()
    err = conn.Raw(func (driverConn interface{}) error {
        sqliteConn := driverConn.(*sqlite3.SQLiteConn)
        // ... use sqliteConn
    })
    // if err != nil { ... }

Go SQlite3 Extensions

If you want to register Go functions as SQLite extension functions
you can make a custom driver by calling RegisterFunction from
ConnectHook.

    regex = func(re, s string) (bool, error) {
        return regexp.MatchString(re, s)
    }
    sql.Register("sqlite3_extended",
        &sqlite3.SQLiteDriver{
            ConnectHook: func(conn *sqlite3.SQLiteConn) error {
                return conn.RegisterFunc("regexp", regex, true)
            },
        })

You can then use the custom driver by passing its name to sql.Open.

    var i int
    conn, err := sql.Open("sqlite3_extended", "./foo.db")
    if err != nil {
        panic(err)
    }
    err = conn.QueryRow(`SELECT regexp("foo.*", "seafood")`).Scan(&i)
    if err != nil {
        panic(err)
    }

See the documentation of RegisterFunc for more details.

*/
package sqlite3
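The RegisterFunc snippet in the package comment above is nearly runnable as written; a self-contained sketch that compiles and runs, assuming only this driver and the standard library (the in-memory DSN and variable names are illustrative):

package main

import (
    "database/sql"
    "fmt"
    "log"
    "regexp"

    "github.com/mattn/go-sqlite3"
)

func main() {
    // Register a custom driver whose connections expose regexp() as an SQL
    // function, as the "Go SQlite3 Extensions" section above describes.
    regex := func(re, s string) (bool, error) {
        return regexp.MatchString(re, s)
    }
    sql.Register("sqlite3_extended",
        &sqlite3.SQLiteDriver{
            ConnectHook: func(conn *sqlite3.SQLiteConn) error {
                return conn.RegisterFunc("regexp", regex, true)
            },
        })

    db, err := sql.Open("sqlite3_extended", ":memory:")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    var matched int
    if err := db.QueryRow(`SELECT regexp('foo.*', 'seafood')`).Scan(&matched); err != nil {
        log.Fatal(err)
    }
    fmt.Println(matched) // 1
}

Registering the custom driver exactly once (for example from init) matters, since sql.Register panics when the same name is registered twice.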
150
vendor/github.com/mattn/go-sqlite3/error.go
generated
vendored
Normal file
@@ -0,0 +1,150 @@
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
//
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file.

package sqlite3

/*
#ifndef USE_LIBSQLITE3
#include <sqlite3-binding.h>
#else
#include <sqlite3.h>
#endif
*/
import "C"
import "syscall"

// ErrNo inherit errno.
type ErrNo int

// ErrNoMask is mask code.
const ErrNoMask C.int = 0xff

// ErrNoExtended is extended errno.
type ErrNoExtended int

// Error implement sqlite error code.
type Error struct {
    Code         ErrNo         /* The error code returned by SQLite */
    ExtendedCode ErrNoExtended /* The extended error code returned by SQLite */
    SystemErrno  syscall.Errno /* The system errno returned by the OS through SQLite, if applicable */
    err          string        /* The error string returned by sqlite3_errmsg(),
    this usually contains more specific details. */
}

// result codes from http://www.sqlite.org/c3ref/c_abort.html
var (
    ErrError      = ErrNo(1)  /* SQL error or missing database */
    ErrInternal   = ErrNo(2)  /* Internal logic error in SQLite */
    ErrPerm       = ErrNo(3)  /* Access permission denied */
    ErrAbort      = ErrNo(4)  /* Callback routine requested an abort */
    ErrBusy       = ErrNo(5)  /* The database file is locked */
    ErrLocked     = ErrNo(6)  /* A table in the database is locked */
    ErrNomem      = ErrNo(7)  /* A malloc() failed */
    ErrReadonly   = ErrNo(8)  /* Attempt to write a readonly database */
    ErrInterrupt  = ErrNo(9)  /* Operation terminated by sqlite3_interrupt() */
    ErrIoErr      = ErrNo(10) /* Some kind of disk I/O error occurred */
    ErrCorrupt    = ErrNo(11) /* The database disk image is malformed */
    ErrNotFound   = ErrNo(12) /* Unknown opcode in sqlite3_file_control() */
    ErrFull       = ErrNo(13) /* Insertion failed because database is full */
    ErrCantOpen   = ErrNo(14) /* Unable to open the database file */
    ErrProtocol   = ErrNo(15) /* Database lock protocol error */
    ErrEmpty      = ErrNo(16) /* Database is empty */
    ErrSchema     = ErrNo(17) /* The database schema changed */
    ErrTooBig     = ErrNo(18) /* String or BLOB exceeds size limit */
    ErrConstraint = ErrNo(19) /* Abort due to constraint violation */
    ErrMismatch   = ErrNo(20) /* Data type mismatch */
    ErrMisuse     = ErrNo(21) /* Library used incorrectly */
    ErrNoLFS      = ErrNo(22) /* Uses OS features not supported on host */
    ErrAuth       = ErrNo(23) /* Authorization denied */
    ErrFormat     = ErrNo(24) /* Auxiliary database format error */
    ErrRange      = ErrNo(25) /* 2nd parameter to sqlite3_bind out of range */
    ErrNotADB     = ErrNo(26) /* File opened that is not a database file */
    ErrNotice     = ErrNo(27) /* Notifications from sqlite3_log() */
    ErrWarning    = ErrNo(28) /* Warnings from sqlite3_log() */
)

// Error return error message from errno.
func (err ErrNo) Error() string {
    return Error{Code: err}.Error()
}

// Extend return extended errno.
func (err ErrNo) Extend(by int) ErrNoExtended {
    return ErrNoExtended(int(err) | (by << 8))
}

// Error return error message that is extended code.
func (err ErrNoExtended) Error() string {
    return Error{Code: ErrNo(C.int(err) & ErrNoMask), ExtendedCode: err}.Error()
}

func (err Error) Error() string {
    var str string
    if err.err != "" {
        str = err.err
    } else {
        str = C.GoString(C.sqlite3_errstr(C.int(err.Code)))
    }
    if err.SystemErrno != 0 {
        str += ": " + err.SystemErrno.Error()
    }
    return str
}

// result codes from http://www.sqlite.org/c3ref/c_abort_rollback.html
var (
    ErrIoErrRead              = ErrIoErr.Extend(1)
    ErrIoErrShortRead         = ErrIoErr.Extend(2)
    ErrIoErrWrite             = ErrIoErr.Extend(3)
    ErrIoErrFsync             = ErrIoErr.Extend(4)
    ErrIoErrDirFsync          = ErrIoErr.Extend(5)
    ErrIoErrTruncate          = ErrIoErr.Extend(6)
    ErrIoErrFstat             = ErrIoErr.Extend(7)
    ErrIoErrUnlock            = ErrIoErr.Extend(8)
    ErrIoErrRDlock            = ErrIoErr.Extend(9)
    ErrIoErrDelete            = ErrIoErr.Extend(10)
    ErrIoErrBlocked           = ErrIoErr.Extend(11)
    ErrIoErrNoMem             = ErrIoErr.Extend(12)
    ErrIoErrAccess            = ErrIoErr.Extend(13)
    ErrIoErrCheckReservedLock = ErrIoErr.Extend(14)
    ErrIoErrLock              = ErrIoErr.Extend(15)
    ErrIoErrClose             = ErrIoErr.Extend(16)
    ErrIoErrDirClose          = ErrIoErr.Extend(17)
    ErrIoErrSHMOpen           = ErrIoErr.Extend(18)
    ErrIoErrSHMSize           = ErrIoErr.Extend(19)
    ErrIoErrSHMLock           = ErrIoErr.Extend(20)
    ErrIoErrSHMMap            = ErrIoErr.Extend(21)
    ErrIoErrSeek              = ErrIoErr.Extend(22)
    ErrIoErrDeleteNoent       = ErrIoErr.Extend(23)
    ErrIoErrMMap              = ErrIoErr.Extend(24)
    ErrIoErrGetTempPath       = ErrIoErr.Extend(25)
    ErrIoErrConvPath          = ErrIoErr.Extend(26)
    ErrLockedSharedCache      = ErrLocked.Extend(1)
    ErrBusyRecovery           = ErrBusy.Extend(1)
    ErrBusySnapshot           = ErrBusy.Extend(2)
    ErrCantOpenNoTempDir      = ErrCantOpen.Extend(1)
    ErrCantOpenIsDir          = ErrCantOpen.Extend(2)
    ErrCantOpenFullPath       = ErrCantOpen.Extend(3)
    ErrCantOpenConvPath       = ErrCantOpen.Extend(4)
    ErrCorruptVTab            = ErrCorrupt.Extend(1)
    ErrReadonlyRecovery       = ErrReadonly.Extend(1)
    ErrReadonlyCantLock       = ErrReadonly.Extend(2)
    ErrReadonlyRollback       = ErrReadonly.Extend(3)
    ErrReadonlyDbMoved        = ErrReadonly.Extend(4)
    ErrAbortRollback          = ErrAbort.Extend(2)
    ErrConstraintCheck        = ErrConstraint.Extend(1)
    ErrConstraintCommitHook   = ErrConstraint.Extend(2)
    ErrConstraintForeignKey   = ErrConstraint.Extend(3)
    ErrConstraintFunction     = ErrConstraint.Extend(4)
    ErrConstraintNotNull      = ErrConstraint.Extend(5)
    ErrConstraintPrimaryKey   = ErrConstraint.Extend(6)
    ErrConstraintTrigger      = ErrConstraint.Extend(7)
    ErrConstraintUnique       = ErrConstraint.Extend(8)
    ErrConstraintVTab         = ErrConstraint.Extend(9)
    ErrConstraintRowID        = ErrConstraint.Extend(10)
    ErrNoticeRecoverWAL       = ErrNotice.Extend(1)
    ErrNoticeRecoverRollback  = ErrNotice.Extend(2)
    ErrWarningAutoIndex       = ErrWarning.Extend(1)
)
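A short sketch of how callers typically consume these codes, assuming a table with a UNIQUE column (the schema and values are illustrative, not part of this change):

package main

import (
    "database/sql"
    "errors"
    "fmt"
    "log"

    "github.com/mattn/go-sqlite3"
)

func main() {
    db, err := sql.Open("sqlite3", ":memory:")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    if _, err := db.Exec(`CREATE TABLE users (email TEXT UNIQUE)`); err != nil {
        log.Fatal(err)
    }
    if _, err := db.Exec(`INSERT INTO users VALUES ('a@example.com')`); err != nil {
        log.Fatal(err)
    }

    // The second insert violates the UNIQUE constraint; the driver returns a
    // sqlite3.Error whose Code and ExtendedCode map to the values defined above.
    _, err = db.Exec(`INSERT INTO users VALUES ('a@example.com')`)

    var sqliteErr sqlite3.Error
    if errors.As(err, &sqliteErr) {
        fmt.Println(sqliteErr.Code == sqlite3.ErrConstraint)               // true
        fmt.Println(sqliteErr.ExtendedCode == sqlite3.ErrConstraintUnique) // true
    }
}

Comparing ExtendedCode against ErrConstraintUnique distinguishes a duplicate key from other constraint failures that share Code == ErrConstraint.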