Miniflux v2-2.2.6 source archive (commit 600f19cc875ed360c541d2690e456f46fecca3a6)

==> v2-2.2.6/.devcontainer/devcontainer.json <==
{
"name": "Miniflux",
"dockerComposeFile": "docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspace",
"remoteUser": "vscode",
"forwardPorts": [
8080
],
"features": {
"ghcr.io/devcontainers/features/github-cli:1": {},
"ghcr.io/devcontainers/features/docker-outside-of-docker:1": {}
},
"customizations": {
"vscode": {
"settings": {
"go.toolsManagement.checkForUpdates": "local",
"go.useLanguageServer": true,
"go.gopath": "/go"
},
"extensions": [
"ms-azuretools.vscode-docker",
"golang.go",
"rangav.vscode-thunder-client",
"GitHub.codespaces",
"GitHub.copilot",
"GitHub.copilot-chat"
]
}
}
}

==> v2-2.2.6/.devcontainer/docker-compose.yml <==
version: '3.8'
services:
app:
image: mcr.microsoft.com/devcontainers/go:1.23
volumes:
- ..:/workspace:cached
command: sleep infinity
network_mode: service:db
environment:
- CREATE_ADMIN=1
- ADMIN_USERNAME=admin
- ADMIN_PASSWORD=test123
db:
image: postgres:15
restart: unless-stopped
volumes:
- postgres-data:/var/lib/postgresql/data
hostname: postgres
environment:
POSTGRES_DB: miniflux2
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_HOST_AUTH_METHOD: trust
ports:
- 5432:5432
apprise:
image: caronc/apprise:1.0
restart: unless-stopped
hostname: apprise
volumes:
postgres-data: null
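
Note on the compose file above: because the `app` service uses `network_mode: service:db`, the workspace container shares the database container's network namespace, so PostgreSQL is reachable on `127.0.0.1:5432` from inside the devcontainer. A minimal Go sketch to sanity-check that wiring; the address and timeout are assumptions for illustration, not part of the repository:

```go
package main

import (
	"fmt"
	"net"
	"os"
	"time"
)

func main() {
	// The app container shares the db service's network namespace,
	// so the PostgreSQL instance defined above listens on localhost.
	conn, err := net.DialTimeout("tcp", "127.0.0.1:5432", 3*time.Second)
	if err != nil {
		fmt.Fprintf(os.Stderr, "postgres is not reachable: %v\n", err)
		os.Exit(1)
	}
	conn.Close()
	fmt.Println("postgres is reachable on 127.0.0.1:5432")
}
```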

==> v2-2.2.6/.github/ISSUE_TEMPLATE/bug_report.md <==
---
name: Bug report
about: Create a bug report
title: ''
labels: bug, triage needed
assignees: ''
---

==> v2-2.2.6/.github/ISSUE_TEMPLATE/config.yml <==
blank_issues_enabled: false

==> v2-2.2.6/.github/ISSUE_TEMPLATE/feature_request.md <==
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: wishlist
assignees: ''
---
- [ ] I have read this document: https://miniflux.app/opinionated.html#feature-request

==> v2-2.2.6/.github/ISSUE_TEMPLATE/feed-problems.md <==
---
name: Feed Problems
about: Problems with a feed or a website
title: ''
labels: feed problems, triage needed
assignees: ''
---

==> v2-2.2.6/.github/dependabot.yml <==
version: 2
updates:
- package-ecosystem: "gomod"
directory: "/"
schedule:
interval: "daily"
reviewers:
- "fguillot"
assignees:
- "fguillot"
- package-ecosystem: "docker"
directory: "/packaging/docker/alpine"
schedule:
interval: "weekly"
reviewers:
- "fguillot"
assignees:
- "fguillot"
- package-ecosystem: "docker"
directory: "/packaging/docker/distroless"
schedule:
interval: "weekly"
reviewers:
- "fguillot"
assignees:
- "fguillot"
- package-ecosystem: "docker"
directory: "packaging/debian"
schedule:
interval: "weekly"
reviewers:
- "fguillot"
assignees:
- "fguillot"
- package-ecosystem: "docker"
directory: "packaging/rpm"
schedule:
interval: "weekly"
reviewers:
- "fguillot"
assignees:
- "fguillot"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
reviewers:
- "fguillot"
assignees:
- "fguillot"

==> v2-2.2.6/.github/pull_request_template.md <==
Have you followed these guidelines?
- [ ] I have tested my changes
- [ ] There are no breaking changes
- [ ] I have thoroughly tested my changes and verified there are no regressions
- [ ] My commit messages follow the [Conventional Commits specification](https://www.conventionalcommits.org/)
- [ ] I have read this document: https://miniflux.app/faq.html#pull-request

==> v2-2.2.6/.github/workflows/build_binaries.yml <==
name: Build Binaries
on:
workflow_dispatch:
push:
tags:
- '[0-9]+.[0-9]+.[0-9]+'
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Golang
uses: actions/setup-go@v5
with:
go-version: "1.24.x"
check-latest: true
- name: Compile binaries
env:
CGO_ENABLED: 0
run: make build
- name: Upload binaries
uses: actions/upload-artifact@v4
with:
name: binaries
path: miniflux-*
if-no-files-found: error
retention-days: 5

==> v2-2.2.6/.github/workflows/codeql-analysis.yml <==
name: "CodeQL"
permissions: read-all
on:
push:
branches: [ main ]
paths:
- '**.js'
- '**.go'
- '!**_test.go'
pull_request:
# The branches below must be a subset of the branches above
branches: [ main ]
paths:
- '**.js'
- '**.go'
- '!**_test.go'
schedule:
- cron: '45 22 * * 3'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version: "1.24.x"
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
- name: Autobuild
uses: github/codeql-action/autobuild@v3
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3

==> v2-2.2.6/.github/workflows/debian_packages.yml <==
name: Debian Packages
permissions: read-all
on:
workflow_dispatch:
push:
tags:
- '[0-9]+.[0-9]+.[0-9]+'
schedule:
- cron: '0 0 * * 1,4' # Runs at 00:00 UTC on Monday and Thursday
pull_request:
branches: [ main ]
paths:
- 'packaging/debian/**' # Only run on changes to the debian packaging files
jobs:
test-packages:
if: github.event_name == 'schedule' || github.event_name == 'pull_request'
name: Test Packages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
id: buildx
with:
install: true
- name: Available Docker Platforms
run: echo ${{ steps.buildx.outputs.platforms }}
- name: Build Debian Packages
run: make debian-packages
- name: List generated files
run: ls -l *.deb
build-packages-manually:
if: github.event_name == 'workflow_dispatch'
name: Build Packages Manually
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
id: buildx
with:
install: true
- name: Available Docker Platforms
run: echo ${{ steps.buildx.outputs.platforms }}
- name: Build Debian Packages
run: make debian-packages
- name: Upload package
uses: actions/upload-artifact@v4
with:
name: packages
path: "*.deb"
if-no-files-found: error
retention-days: 3
publish-packages:
if: github.event_name == 'push'
name: Publish Packages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
id: buildx
with:
install: true
- name: Available Docker Platforms
run: echo ${{ steps.buildx.outputs.platforms }}
- name: Build Debian Packages
run: make debian-packages
- name: List generated files
run: ls -l *.deb
- name: Upload packages to repository
env:
FURY_TOKEN: ${{ secrets.FURY_TOKEN }}
run: for f in *.deb; do curl -F package=@$f https://$FURY_TOKEN@push.fury.io/miniflux/; done

==> v2-2.2.6/.github/workflows/docker.yml <==
name: Docker
on:
schedule:
- cron: '0 1 * * *'
push:
tags:
- '[0-9]+.[0-9]+.[0-9]+'
pull_request:
branches: [ main ]
paths:
- 'packaging/docker/**'
jobs:
docker-images:
name: Docker Images
permissions:
packages: write
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Generate Alpine Docker tags
id: docker_alpine_tags
uses: docker/metadata-action@v5
with:
images: |
docker.io/${{ github.repository_owner }}/miniflux
ghcr.io/${{ github.repository_owner }}/miniflux
quay.io/${{ github.repository_owner }}/miniflux
tags: |
type=ref,event=pr
type=schedule,pattern=nightly
type=semver,pattern={{raw}}
- name: Generate Distroless Docker tags
id: docker_distroless_tags
uses: docker/metadata-action@v5
with:
images: |
docker.io/${{ github.repository_owner }}/miniflux
ghcr.io/${{ github.repository_owner }}/miniflux
quay.io/${{ github.repository_owner }}/miniflux
tags: |
type=ref,event=pr
type=schedule,pattern=nightly
type=semver,pattern={{raw}}
flavor: |
suffix=-distroless,onlatest=true
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to DockerHub
if: ${{ github.event_name != 'pull_request' && vars.PUBLISH_DOCKER_IMAGES == 'true' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: ${{ github.event_name != 'pull_request' && vars.PUBLISH_DOCKER_IMAGES == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Quay Container Registry
if: ${{ github.event_name != 'pull_request' && vars.PUBLISH_DOCKER_IMAGES == 'true' }}
uses: docker/login-action@v3
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_TOKEN }}
- name: Build and Push Alpine images
uses: docker/build-push-action@v6
if: ${{ vars.PUBLISH_DOCKER_IMAGES == 'true' }}
with:
context: .
file: ./packaging/docker/alpine/Dockerfile
platforms: linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.docker_alpine_tags.outputs.tags }}
- name: Build and Push Distroless images
uses: docker/build-push-action@v6
if: ${{ vars.PUBLISH_DOCKER_IMAGES == 'true' }}
with:
context: .
file: ./packaging/docker/distroless/Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.docker_distroless_tags.outputs.tags }}

==> v2-2.2.6/.github/workflows/linters.yml <==
name: Linters
permissions: read-all
on:
pull_request:
branches:
- main
workflow_dispatch:
jobs:
jshint:
name: Javascript Linter
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install linters
run: |
sudo npm install -g jshint@2.13.6 eslint@8.57.0
- name: Run jshint
run: jshint internal/ui/static/js/*.js
- name: Run ESLint
run: eslint internal/ui/static/js/*.js
golangci:
name: Golang Linters
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version: "1.24.x"
- uses: golangci/golangci-lint-action@v6
with:
args: >
--timeout 10m
--exclude-dirs=tests
--disable errcheck
--enable sqlclosecheck,misspell,gofmt,goimports,whitespace,gocritic
- uses: dominikh/staticcheck-action@v1.3.1
with:
version: "latest"
install-go: false
commitlint:
name: Commit Linter
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "lts/*"
- name: Install commitlint
run: |
npm install --save-dev @commitlint/config-conventional @commitlint/cli
echo "module.exports = {extends: ['@commitlint/config-conventional']}" > commitlint.config.js
- name: Validate PR commits
run: npx commitlint --from ${{ github.event.pull_request.base.sha }} --to ${{ github.event.pull_request.head.sha }} --verbose

==> v2-2.2.6/.github/workflows/rpm_packages.yml <==
name: RPM Packages
permissions: read-all
on:
workflow_dispatch:
push:
tags:
- '[0-9]+.[0-9]+.[0-9]+'
schedule:
- cron: '0 0 * * 1,4' # Runs at 00:00 UTC on Monday and Thursday
pull_request:
branches: [ main ]
paths:
- 'packaging/rpm/**' # Only run on changes to the rpm packaging files
jobs:
test-package:
if: github.event_name == 'schedule' || github.event_name == 'pull_request'
name: Test Packages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Build RPM Package
run: make rpm
- name: List generated files
run: ls -l *.rpm
build-package-manually:
if: github.event_name == 'workflow_dispatch'
name: Build Packages Manually
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Build RPM Package
run: make rpm
- name: Upload package
uses: actions/upload-artifact@v4
with:
name: packages
path: "*.rpm"
if-no-files-found: error
retention-days: 3
publish-package:
if: github.event_name == 'push'
name: Publish Packages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Build RPM Package
run: make rpm
- name: List generated files
run: ls -l *.rpm
- name: Upload package to repository
env:
FURY_TOKEN: ${{ secrets.FURY_TOKEN }}
run: for f in *.rpm; do curl -F package=@$f https://$FURY_TOKEN@push.fury.io/miniflux/; done

==> v2-2.2.6/.github/workflows/tests.yml <==
name: Tests
permissions: read-all
on:
pull_request:
branches:
- main
workflow_dispatch:
jobs:
unit-tests:
name: Unit Tests
runs-on: ${{ matrix.os }}
strategy:
max-parallel: 4
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
go-version: ["1.24.x"]
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}
- name: Run unit tests with coverage and race conditions checking
if: matrix.os == 'ubuntu-latest'
run: make test
- name: Run unit tests without coverage and race conditions checking
if: matrix.os != 'ubuntu-latest'
run: go test ./...
integration-tests:
name: Integration Tests
runs-on: ubuntu-latest
services:
postgres:
image: postgres:9.5
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: "1.24.x"
- name: Install Postgres client
run: sudo apt update && sudo apt install -y postgresql-client
- name: Run integration tests
run: make integration-test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PGHOST: 127.0.0.1
PGPASSWORD: postgres

==> v2-2.2.6/.gitignore <==
./*.sha256
./miniflux
.idea
.vscode
*.deb
*.rpm
miniflux-*

==> v2-2.2.6/ChangeLog <==
Version 2.2.6 (February 22, 2025)
---------------------------------
* test(encoding): add unit tests for `CharsetReader` function
* refactor(xml): improve the performances of `NewXMLDecoder`
* refactor(ui): remove superfluous cast
* refactor(request): broaden an error condition when parsing cookies
* refactor(processor): remove superfluous parenthesis
* refactor(opml): don't define receivers on both values and pointer
* refactor(model): simplify a condition
* refactor(model): don't define methods both on instance and pointer
* refactor(locale): sort JSON documents alphabetically by keys
* refactor(locale): remove superfluous parenthesis
* refactor(js): use proper types in `app.js`
* refactor(js): replace the deprecated `window.pageYOffset` with `window.scrollY`
* refactor(js): remove a useless `return`
* refactor(js): anchor `=` removal in `webauthn_handler.js` regex
* refactor(js): add default value for parameter `fallbackSelf` in `goToPage` function
* refactor(integration): don't use `defer` in a loop
* refactor(icon): guard against a potential `null` dereference
* refactor(date): use an else-if instead of two if statements
* refactor(css): use shortcuts to declare padding
* refactor(client): remove a useless cast
* perf(sanitizer): remove two useless calls to `strings.ReplaceAll`
* fix(ui): Redirect correctly post feed removal from category feeds list
* fix(scraper): update TechCrunch scraper rule
* fix(scraper): avoid encoding issue if charset meta tag is after 1024 bytes
* fix(sanitizer): non-allowed attributes are not properly stripped
* fix(sanitizer): correct HTML tag name from `tfooter` to `tfoot`
* fix(rss): handle item title with `CDATA` content correctly
* fix(locale): missing hyphen in `de_DE.json`
* fix(css): avoid aside overflow on the pagination menu
* fix(css): `--entry-content-aside-border-color` is missing from `system.css`
* fix(api): return 500 response when JSON serialization fails
* fix(api): JSON encoding is failing with dates at 0 AD and negative timezone offset
* feat(urlcleaner): add trackers to the blocklist
* feat(ui): open the `` tag in edit feed page when the feature is enabled
* feat(sanitizer): improve text truncation with better space handling
* feat(sanitizer): allow `img` tags with only a `srcset` and no `src` attribute
* feat(rss): add workaround for RSS item title with HTML content
* feat(pushover): add integration with pushover.net
* feat(processor): fetch YouTube watch time in bulk using the API
* feat(locale): update Traditional Chinese translation
* feat(locale): update Polish translation
* feat(locale): update French translation
* feat(locale): add Taiwanese POJ (nan-Latn-pehoeji)
* feat(integration): update Linkace integration to support API v2
* feat(integration): add webhook URL per feed
* feat(integration): add Slack integration
* feat(css): improve aside element position on smartphone
* ci: update GitHub Actions workflows to use Go 1.24
* ci: trigger packaging tests on pull requests
* ci: add `commitlint` to validate PR commit messages
* build(deps): bump `golang` in `/packaging/debian`
* build(deps): bump `golang.org/x/term` from `0.28.0` to `0.29.0`
* build(deps): bump `golang.org/x/oauth2` from `0.25.0` to `0.26.0`
* build(deps): bump `golang.org/x/net` from `0.34.0` to `0.35.0`
* build(deps): bump `golang.org/x/image` from `0.23.0` to `0.24.0`
* build(deps): bump `golang.org/x/crypto` from `0.32.0` to `0.33.0`
* build(deps): bump `github.com/PuerkitoBio/goquery` from `1.10.1` to `1.10.2`
Version 2.2.5 (January 20, 2025)
--------------------------------
* test(js): improve `.jshintrc` (strict comparison, etc...)
* test(sanitizer): add a fuzzer
* refactor(rewriter): use custom title case converter implementation instead of `golang.org/x/text/cases.Title()`
* refactor(readingtime): replace `whatlanggo` package with an ad-hoc implementation
* refactor(oauth2): no need to use `io.WriteString` when sha256 provides a way to obtain a sum in a single call
* refactor(js): simplify a bit `keyboard_handler.js`
* refactor(js): remove an outdated check for `{passive: true}`
* refactor(js): minor refactoring of `touch_handler.js`
* refactor(js): minor improvements in `app.js`
* refactor(database): add special handling for PostgreSQL-specific migrations
* fix(ui): reading preferences are reset if the form values are incorrect
* fix(sanitizer): allow `` tags
* fix(finder): do not add redirections to the list of subscriptions to avoid confusion
* fix: update Wallabag URL label to avoid confusion
* fix: improve pagination when having identical publication date
* fix: do not strip tags in Atom entry title
* feat(ntfy): Add option to use internal links
* feat(locale): update Polish translation
* feat(locale): update German translation
* feat(integration): add Discord integration
* feat(database): add optional build support for SQLite
* feat: validate usernames upon creation
* feat: replace `%{?systemd_requires}` with `%{?systemd_ordering}`
* feat: bump linter and minifier from ECMAScript 2017 to 2020 (ES11)
* feat: add `fix_ghost_cards` rewrite rule
* ci: tighten the CodeQL rules
* ci: run Docker tests only when the Dockerfiles are modified
* ci: run `-race -cover` only on Ubuntu jobs
* ci: don't specify languages for CodeQL
* ci: don't run `go vet ./...` as it's run as part of `golangci-lint`
* ci: checkout before installing Go to improve cache efficiency
* ci: avoid building Linux packages for each pull-request
* build(deps): bump `golang.org/x/oauth2` from `0.24.0` to `0.25.0`
* build(deps): bump `golang.org/x/net` from `0.33.0` to `0.34.0`
* build(deps): bump `golang.org/x/crypto` from `0.31.0` to `0.32.0`
* build(deps): bump `github.com/tdewolff/minify/v2` from `2.21.2` to `2.21.3`
* build(deps): bump `github.com/PuerkitoBio/goquery` from `1.10.0` to `1.10.1`
* build(deps): bump `github.com/coreos/go-oidc/v3` from `3.11.0` to `3.12.0`
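
As an illustration of the `refactor(oauth2)` entry above: `sha256.Sum256` hashes a byte slice in a single call, so there is no need to create a `hash.Hash` and feed it with `io.WriteString`. A minimal sketch of the two equivalent approaches; the input string is made up for the example:

```go
package main

import (
	"crypto/sha256"
	"fmt"
	"io"
)

func main() {
	verifier := "example-code-verifier" // illustrative input only

	// Before: write into a hash.Hash, then read the sum back.
	h := sha256.New()
	io.WriteString(h, verifier)
	oldSum := h.Sum(nil)

	// After: obtain the sum in a single call.
	newSum := sha256.Sum256([]byte(verifier))

	fmt.Println(fmt.Sprintf("%x", oldSum) == fmt.Sprintf("%x", newSum)) // true
}
```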
Version 2.2.4 (December 20, 2024)
---------------------------------
* test(rewrite): add unit test for referer rewrite function
* refactor(subscription): use `strings.HasSuffix` instead of a regex in `FindSubscriptionsFromYouTubePlaylistPage`
* refactor(sanitizer): use `token.String()` instead of `html.EscapeString(token.Data)`
* refactor(sanitizer): simplify `isValidTag`
* refactor(sanitizer): simplify `hasRequiredAttributes`
* refactor(sanitizer): remove condition because `config.Opts` is guaranteed to never be nil
* refactor(sanitizer): remove a now-useless function after refactoring
* refactor(sanitizer): refactor conditions to highlight their similitude, enabling further refactoring
* refactor(sanitizer): optimize `strip_tags.go`
* refactor(sanitizer): micro-optimizations of `srcset.go`
* refactor(sanitizer): merge two conditions
* refactor(sanitizer): inline a function in `sanitizeAttributes` and fix a bug in it
* refactor(sanitizer): inline a condition in `sanitizeSrcsetAttr`
* refactor(sanitizer): improve `rewriteIframeURL()`
* refactor(sanitizer): Google+ isn't a thing anymore
* refactor(sanitizer): change the scope of a variable
* refactor(rewriter): replace regex with URL parsing for referrer override
* refactor(rewriter): avoid the use of regex in `addDynamicImage`
* refactor(rewrite): remove unused function arguments
* refactor(readability): various improvements and optimizations
* refactor(readability): simplify the regexes in `readability.go`
* refactor(processor): use URL parsing instead of a regex
* refactor(processor): improve the `rewrite` URL rule regex
* refactor(locale): delay parsing of translations until they're used
* refactor(js): factorise a line in `app.js`
* refactor(handler): delay `store.UserByID()` as much as possible
* refactor(css): replace `-ms-text-size-adjust` with `text-size-adjust`
* refactor(css): remove `-webkit-clip-path`
* refactor(css): factorise `.pagination-next` and `.pagination-last` together
* refactor: use a better construct than `doc.Find(…).First()`
* refactor: use `min/max` instead of `math.Min/math.Max`
* refactor: refactor `internal/reader/readability/testdata`
* refactor: optimize `sanitizeAttributes`
* refactor: get rid of `numberOfPluralFormsPerLanguage` test-only variable
* fix(storage): replace timezone function call with view
* fix(consistency): align feed modification behavior between API and UI
* fix(ci): fix grammar in pull-request template
* fix: load icon from site URL instead of feed URL
* fix: feed icon from xml ignored during force refresh
* feat(rewrite)!: remove `parse_markdown` rewrite rule
* feat(mediaproxy): update predefined referer spoofing rules for restricted media resources
* feat(locale): update translations to clarify readeck URL instead of readeck API endpoint
* feat(locale): update German translations
* feat(locale): update Chinese translations
* feat(apprise): update `SendNotification` to handle multiple entries and add logging
* feat(apprise): add title in notification request body
* feat: resize favicons before storing them in the database
* feat: optionally fetch watch time from YouTube API instead of website
* feat: only show the commit URL if it's not empty on `/about`
* feat: add predefined scraper rules for `arstechnica.com`
* feat: add date-based entry filtering rules
* chore: remove `blog.laravel.com` rewrite rule
* build(deps): bump `library/alpine` in `/packaging/docker/alpine` to `3.21`
* build(deps): bump `golang.org/x/term` from `0.26.0` to `0.27.0`
* build(deps): bump `golang.org/x/net` from `0.31.0` to `0.33.0`
* build(deps): bump `golang.org/x/crypto` from `0.30.0` to `0.31.0`
* build(deps): bump `github.com/tdewolff/minify/v2` from `2.21.1` to `2.21.2`
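
Several of the 2.2.4 entries above replace regular expressions with `net/url` parsing (for example for the referrer override). A minimal sketch of the general technique; the hostnames and referer values below are illustrative, not the rules shipped in Miniflux:

```go
package main

import (
	"fmt"
	"net/url"
)

// refererForURL picks a spoofed Referer based on the parsed hostname
// instead of matching the raw URL with a regex.
func refererForURL(mediaURL string) string {
	u, err := url.Parse(mediaURL)
	if err != nil {
		return ""
	}
	switch u.Hostname() {
	case "example-cdn.com", "img.example-cdn.com": // example hosts only
		return "https://example-cdn.com/"
	default:
		return ""
	}
}

func main() {
	fmt.Println(refererForURL("https://img.example-cdn.com/a.jpg")) // https://example-cdn.com/
	fmt.Println(refererForURL("https://other.example/a.jpg"))       // (empty)
}
```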
Version 2.2.3 (November 10, 2024)
---------------------------------
* fix: unable to change password due to a typo in SQL parameter
* fix: show only one player when there are several audio/video enclosures
* feat(mediaproxy): pass original filename in `Content-Disposition` header
* feat(mediaproxy): implement referer spoofing for restricted media resources
* feat(integration): update Shiori integration to use new API endpoints for login/bookmark
* build(deps): bump `golang.org/x/text` from `0.19.0` to `0.20.0`
* build(deps): bump `golang.org/x/term` from `0.25.0` to `0.26.0`
* build(deps): bump `golang.org/x/oauth2` from `0.23.0` to `0.24.0`
* build(deps): bump `golang.org/x/net` from `0.30.0` to `0.31.0`
* build(deps): bump `golang.org/x/crypto` from `0.28.0` to `0.29.0`
Version 2.2.2 (October 29, 2024)
--------------------------------
* fix(webauthn): add backup eligibility flag workaround to avoid a 401 response
* fix: update `Last-Modified` if it changes in a 304 response
* feat(webauthn): show help message regarding username and non-discoverable credentials
* feat(rss): calculate hash based on item title/content for feeds without GUID and link
* feat(locale): update Chinese translations
* feat(locale): update Polish translations
* feat(integration): add Cubox integration
* feat(client): add `custom_js` field to Go API client
* feat(api): add endpoint for user integration status
* feat: update feed icon during force refresh
* feat: take `Retry-After` header into consideration for rate limited feeds
* feat: set entry URL to rewritten URL if a rewrite rule is defined
* feat: replace `xurls` third-party module with an ad-hoc regexp
* feat: add new settings option to allow external fonts
* feat: add custom user JavaScript similar to custom CSS
* chore: update test case comment
* build(deps): bump `golang.org/x/net` from `0.29.0` to `0.30.0`
* build(deps): bump `github.com/yuin/goldmark` from `1.7.4` to `1.7.8`
* build(deps): bump `github.com/tdewolff/minify/v2` from `2.20.37` to `2.21.1`
* build(deps): bump `github.com/prometheus/client_golang`
* build(deps): bump `github.com/andybalholm/brotli` from `1.1.0` to `1.1.1`
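
The `Retry-After` entry above refers to the standard HTTP header, which may contain either a delay in seconds or an HTTP date. A minimal sketch of how such a value can be interpreted; this is a generic illustration, not the exact code used by Miniflux:

```go
package main

import (
	"fmt"
	"net/http"
	"strconv"
	"time"
)

// parseRetryAfter interprets a Retry-After header value, which may be
// either a number of seconds or an HTTP date.
func parseRetryAfter(value string) (time.Duration, bool) {
	if value == "" {
		return 0, false
	}
	if seconds, err := strconv.Atoi(value); err == nil {
		return time.Duration(seconds) * time.Second, true
	}
	if t, err := http.ParseTime(value); err == nil {
		return time.Until(t), true
	}
	return 0, false
}

func main() {
	d, ok := parseRetryAfter("120")
	fmt.Println(d, ok) // 2m0s true
}
```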
Version 2.2.1 (September 28, 2024)
----------------------------------
* refactor: split processor package into smaller files
* fix(mediaproxy): forward client user-agent to origin to bypass bot protection
* fix: use root URL to generate absolute proxy URL
* fix: remove progression save on shared entry
* fix: add datasource variable and upgrade deprecated panels on the Grafana dashboard
* feat(locale): update zh_CN translations
* feat(locale): update Ukrainian translations
* feat(locale): update Spanish translations
* feat(locale): update Dutch translations
* feat: use Bilibili API instead of web scraping to get videos watch time
* feat: add pagination to shared entries listing
* feat: add button to show only starred entries per category
* build(deps): bump `golang.org/x/term` from `0.23.0` to `0.24.0`
* build(deps): bump `golang.org/x/oauth2` from `0.22.0` to `0.23.0`
* build(deps): bump `golang.org/x/net` from `0.28.0` to `0.29.0`
* build(deps): bump `github.com/PuerkitoBio/goquery` from `1.9.2` to `1.10.0`
* build(deps): bump `github.com/prometheus/client_golang` from `1.20.3` to `1.20.4`
* build(deps): bump `github.com/go-webauthn/webauthn` from `0.10.2` to `0.11.2`
* build: update go.mod to Go 1.23
* build: bump devcontainer version to go 1.23
Version 2.2.0 (August 18, 2024)
-------------------------------
* refactor: simplify Youtube feeds discovery
* fix(integration): define content encoding explicitly when sending article body to Readeck
* fix(fever): correct sorting direction when using `max_id` argument
* fix(client): Return `nil` and error if endpoint is an empty string
* fix: video poster image URL is encoded twice when using `MEDIA_PROXY_MODE=all`
* fix: use `BASE_URL` instead of `r.Host` to generate absolute media proxy URL
* fix: panic during YouTube channel feed discovery
* fix: honor `hide_globally` when creating a new feed through the api
* fix: align pagination correctly on small screens with non-English text
* fix: `store.GetEnclosure()` should return `nil` if no rows are returned
* feat(locale): update Turkish translations
* feat(locale): update French translations
* feat(locale): update Chinese translations
* feat(integration): add ntfy integration
* feat(api): add API routes `/v1/enclosures/{enclosureID}`
* feat: validate `OAUTH2_PROVIDER` config option value
* feat: remove YouTube video page subscription finder because `meta[itemprop="channelId"]` no longer exists
* feat: remove well-known URL parameter trackers
* feat: mark media as read when playback reaches 90%
* feat: change log level to info when running migrations
* feat: allow customizing the display name of the OpenID Connect provider
* feat: add support for `base` HTML element when discovering feeds
* feat: add support for `aside` HTML element in entry content
* feat: Add option to disable local auth form
* feat: add license info to Javascript files for LibreJS compatibility
* feat: add `FETCH_BILIBILI_WATCH_TIME` config option
* docs: update links to filtering rules
* chore: avoid using legacy key/value format in Dockerfile
* build(deps): bump `golang.org/x/oauth2` from `0.21.0` to `0.22.0`
* build(deps): bump `golang.org/x/net` from `0.27.0` to `0.28.0`
* build(deps): bump `golang.org/x/crypto` from `0.25.0` to `0.26.0`
* build(deps): bump `github.com/tdewolff/minify/v2` from `2.20.36` to `2.20.37`
* build(deps): bump `github.com/prometheus/client_golang`
* build: update GitHub Actions to Go 1.23
* build: publish OCI images only if `PUBLISH_DOCKER_IMAGES=true`
* build: bump Alpine Linux build image to v3.20
* build: add sha256 checksum file for published binaries
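
One fix above makes `store.GetEnclosure()` return `nil` when no rows match. The usual Go pattern is to treat `sql.ErrNoRows` as "not found" rather than as an error; a minimal sketch with an illustrative table and struct, not Miniflux's actual storage code:

```go
package storage

import (
	"database/sql"
	"errors"
)

// Enclosure is a simplified, illustrative struct.
type Enclosure struct {
	ID  int64
	URL string
}

// getEnclosure returns nil (and no error) when the enclosure does not exist.
func getEnclosure(db *sql.DB, id int64) (*Enclosure, error) {
	var e Enclosure
	err := db.QueryRow(`SELECT id, url FROM enclosures WHERE id = $1`, id).Scan(&e.ID, &e.URL)
	if errors.Is(err, sql.ErrNoRows) {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}
	return &e, nil
}
```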
Version 2.1.4 (July 9, 2024)
----------------------------
* test: add unit tests for `IsModified()` behaviour
* refactor: improve YouTube page feed detection
* fix(ui): settings form is not populated correctly after validation errors
* fix(ui): playback speed indicator precision
* fix(ui): playback speed indicator on shared entries
* fix(integration): preserve existing Pinboard bookmarks
* fix(googlereader): set `CrawlTimeMsec` to the correct precision
* fix(build): failed to solve container image `arm64v8/golang:1.22-bookworm`
* fix(build): add `distroless` suffix on `latest` tag in GitHub workflow
* fix: use `ETag` as a stronger validator than `Last-Modified`
* fix: update `theverge.com` rewrite rule to avoid duplicate image
* fix: incorrect Go package comment `reader/readingtime`
* fix: error out for improper rewrite regexp when processing feed entries
* fix: ensures that session cookies are not expiring before the session is cleaned up from the database as per `CLEANUP_REMOVE_SESSIONS_DAYS`
* fix: `` aspect ratio with `height: auto`
* feat(ui): add `viewport-fit=cover`
* feat(sanitizer): add support for HTML hidden attribute
* feat(locale): update French translations
* feat(integration): add Raindrop integration
* feat(integration): add feed name to Telegram message
* feat(integration): add Betula integration
* feat: use of insecure TLS ciphers when "Allow self-signed or invalid certificates" is enabled to work around some broken websites
* feat: discover feeds from a Youtube playlist pages
* feat: add navigation to last/first page
* feat: add global block and keep filters
* feat: add description field to feed settings
* feat: add `pitchfork.com` scraping rule
* feat: add `FETCH_NEBULA_WATCH_TIME` config option
* Bump `github.com/PuerkitoBio/goquery` from `1.9.1` to `1.9.2`
* Bump `github.com/prometheus/client_golang` from `1.19.0` to `1.19.1`
* build(deps): bump `library/alpine` in `/packaging/docker/alpine`
* build(deps): bump `golangci/golangci-lint-action` from `4` to `6`
* build(deps): bump `golang.org/x/term` from `0.19.0` to `0.22.0`
* build(deps): bump `golang.org/x/oauth2` from `0.19.0` to `0.21.0`
* build(deps): bump `golang.org/x/net` from `0.22.0` to `0.27.0`
* build(deps): bump `golang.org/x/crypto` from `0.24.0` to `0.25.0`
* build(deps): bump `github.com/yuin/goldmark` from `1.7.1` to `1.7.4`
* build(deps): bump `github.com/tdewolff/minify/v2` from `2.20.20` to `2.20.36`
* build(deps): bump `github.com/coreos/go-oidc/v3` from `3.10.0` to `3.11.0`
* build(deps): bump `docker/build-push-action` from `5` to `6`
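
The `ETag`/`Last-Modified` fix above follows standard HTTP conditional requests: when an `ETag` is known, send `If-None-Match` (the stronger validator), otherwise fall back to `If-Modified-Since`, and treat a `304 Not Modified` response as "feed unchanged". A minimal sketch of that flow; the URL and variable names are illustrative:

```go
package main

import (
	"fmt"
	"net/http"
)

func checkFeed(feedURL, etag, lastModified string) (modified bool, err error) {
	req, err := http.NewRequest(http.MethodGet, feedURL, nil)
	if err != nil {
		return false, err
	}
	if etag != "" {
		req.Header.Set("If-None-Match", etag) // ETag wins over Last-Modified
	} else if lastModified != "" {
		req.Header.Set("If-Modified-Since", lastModified)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return false, err
	}
	defer resp.Body.Close()
	return resp.StatusCode != http.StatusNotModified, nil
}

func main() {
	modified, err := checkFeed("https://example.org/feed.xml", `"abc123"`, "")
	fmt.Println(modified, err)
}
```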
Version 2.1.3 (April 27, 2024)
------------------------------
* `api`: `rand.Intn(math.MaxInt64)` causes tests to fail on 32-bit architectures (use `rand.Int()` instead)
* `ci`: use `docker/metadata-action` instead of deprecated shell-scripts
* `database`: remove `entries_feed_url_idx` index because entry URLs can exceed btree index size limit
* `finder`: find feeds from YouTube playlist
* `http/response`: add brotli compression support
* `integration/matrix`: fix function name in comment
* `packaging`: specify container registry explicitly (e.g., Podman does not use `docker.io` by default)
* `packaging`: use `make miniflux` instead of duplicating `go build` arguments (this leverages Go's PIE build mode)
* `reader/fetcher`: add brotli content encoding support
* `reader/processor`: minimize feed entries HTML content
* `reader/rewrite`: add a rule for `oglaf.com`
* `storage`: change `GetReadTime()` function to use `entries_feed_id_hash_key` index
* `ui`: add seek and speed controls to media player
* `ui`: add tag entries page
* `ui`: fix JavaScript error when clicking on unread counter
* `ui`: use `FORCE_REFRESH_INTERVAL` config for category refresh
* Bump `github.com/tdewolff/minify/v2` from `2.20.19` to `2.20.20`
* Bump `golang.org/x/net` from `0.22.0` to `0.24.0`
* Bump `golang.org/x/term` from `0.18.0` to `0.19.0`
* Bump `golang.org/x/oauth2` from `0.18.0` to `0.19.0`
* Bump `github.com/yuin/goldmark` from `1.7.0` to `1.7.1`
Version 2.1.2 (March 30, 2024)
------------------------------
* `api`: rewrite API integration tests without build tags
* `ci`: add basic ESLinter checks
* `ci`: enable go-critic linter and fix various issues detected
* `ci`: fix JavaScript linter path in GitHub Actions
* `cli`: avoid misleading error message when creating an admin user automatically
* `config`: add `FILTER_ENTRY_MAX_AGE_DAYS` option
* `config`: bump the number of simultaneous workers
* `config`: rename `PROXY_*` options to `MEDIA_PROXY_*`
* `config`: use `crypto.GenerateRandomBytes` instead of doing it by hand
* `http/request`: refactor conditions to be more idiomatic
* `http/response`: remove legacy `X-XSS-Protection` header
* `integration/rssbrige`: fix rssbrige import
* `integration/shaarli`: factorize the header+payload concatenation as data
* `integration/shaarli`: no need to base64-encode then remove the padding when we can simply encode without padding
* `integration/shaarli`: the JWT token was declared as using HS256 as algorithm, but was using HS512
* `integration/webhook`: add category title to request body
* `locale`: update Turkish translations
* `man page`: sort config options in alphabetical order
* `mediaproxy`: reduce the internal indentation of `ProxifiedUrl` by inverting some conditions
* `mediaproxy`: simplify and refactor the package
* `model`: replace `Optional{Int,Int64,Float64}` with a generic function `OptionalNumber()`
* `model`: use struct embedding for `FeedCreationRequestFromSubscriptionDiscovery` to reduce code duplication
* `reader/atom`: avoid debug message when the date is empty
* `reader/atom`: change `if !a { a = } if !a {a = }` constructs into `if !a { a = ; if !a {a = }}` to reduce the number of comparisons and improve readability
* `reader/atom`: Move the population of the feed's entries into a new function, to make BuildFeed easier to understand/separate concerns/implementation details
* `reader/atom`: refactor Atom parser to use an adapter
* `reader/atom`: use `sort+compact` instead of `compact+sort` to remove duplicates
* `reader/atom`: when detecting the format, detect its version as well
* `reader/encoding`: inline a one-liner function
* `reader/handler`: fix force refresh feature
* `reader/json`: refactor JSON Feed parser to use an adapter
* `reader/media`: remove a superfluous error-check: `strconv.ParseInt` returns `0` when passed an empty string
* `reader/media`: simplify switch-case by moving a common condition above it
* `reader/processor`: compile block/keep regex only once per feed
* `reader/rdf`: refactor RDF parser to use an adapter
* `reader/rewrite`: inline some one-line functions
* `reader/rewrite`: simplify `removeClickbait`
* `reader/rewrite`: transform a free-standing function into a method
* `reader/rewrite`: use a proper constant instead of a magic number in `applyFuncOnTextContent`
* `reader/rss`: add support for `` element
* `reader/rss`: don't add empty tags to RSS items
* `reader/rss`: refactor RSS parser to use a default namespace to avoid some limitations of the Go XML parser
* `reader/rss`: refactor RSS Parser to use an adapter
* `reader/rss`: remove some duplicated code in RSS parser
* `reader`: ensure that enclosure URLs are always absolute
* `reader`: move iTunes and GooglePlay XML definitions to their own packages
* `reader`: parse podcast categories
* `reader`: remove trailing space in `SiteURL` and `FeedURL`
* `storage`: do not store empty tags
* `storage`: simplify `removeDuplicates()` to use a `sort`+`compact` construct instead of doing it by hand with a hashmap
* `storage`: Use plain strings concatenation instead of building an array and then joining it
* `timezone`: make sure the tests pass when the timezone database is not installed on the host
* `ui/css`: align `min-width` with the other `min-width` values
* `ui/css`: fix regression: "Add to Home Screen" button is unreadable
* `ui/js`: don't use lambdas to return a function, use directly the function instead
* `ui/js`: enable trusted-types
* `ui/js`: fix download button loading label
* `ui/js`: fix JavaScript error on the login page when the user not authenticated
* `ui/js`: inline one-line functions
* `ui/js`: inline some `querySelectorAll` calls
* `ui/js`: reduce the scope of some variables
* `ui/js`: remove a hack for "Chrome 67 and earlier" since it was released in 2018
* `ui/js`: replace `DomHelper.findParent` with `.closest`
* `ui/js`: replace `let` with `const`
* `ui/js`: simplify `DomHelper.getVisibleElements` by using a `filter` instead of a loop with an index
* `ui/js`: use a `Set` instead of an array in a `KeyboardHandler`'s member
* `ui/js`: use some ternaries where it makes sense
* `ui/static`: make use of `HashFromBytes` everywhere
* `ui/static`: set minifier ECMAScript version
* `ui`: add keyboard shortcuts for scrolling to top/bottom of the item list
* `ui`: add media player control playback speed
* `ui`: remove unused variables and improve JSON decoding in `saveEnclosureProgression()`
* `validator`: display an error message on edit feed page when the feed URL is not unique
* Bump `github.com/coreos/go-oidc/v3` from `3.9.0` to `3.10.0`
* Bump `github.com/go-webauthn/webauthn` from `0.10.1` to `0.10.2`
* Bump `github.com/tdewolff/minify/v2` from `2.20.18` to `2.20.19`
* Bump `google.golang.org/protobuf` from `1.32.0` to `1.33.0`
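
The `OptionalNumber()` entry above replaces three near-identical helpers with one generic function. One plausible shape for such a helper is sketched below; this is an illustration of the idea, not necessarily the exact signature or semantics used in `model`:

```go
package model

// Number groups the numeric types previously covered by
// OptionalInt, OptionalInt64 and OptionalFloat64.
type Number interface {
	~int | ~int64 | ~float64
}

// OptionalNumber returns a pointer to value when it is non-zero,
// and nil otherwise, so that zero values can be omitted in output.
func OptionalNumber[T Number](value T) *T {
	if value == 0 {
		return nil
	}
	return &value
}
```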
Version 2.1.1 (March 10, 2024)
------------------------------
* Move search form to a dedicated page
* Add Readeck integration
* Add feed option to disable HTTP/2 to avoid fingerprinting
* Add `Enter` key as a hotkey to open selected item
* Proxify `video` element `poster` attribute
* Add a couple of new possible locations for feeds
* Hugo likes to generate `index.xml`
* `feed.atom` and `feed.rss` are used by enterprise-scale/old-school gigantic CMS
* Fix categories import from Thunderbird's OPML
* Fix logo misalignment when using languages that are more verbose than English
* Google Reader: Do not return a 500 error when no items are returned
* Handle RDF feeds with duplicated `` elements
* Sort integrations alphabetically
* Add more URL validation in media proxy
* Add unit test to ensure each translation has the correct number of plurals
* Add missing plurals for some languages
* Makefile: quiet `git describe` and `rev-parse` stderr: When building from a tarball instead of a cloned git repo, there would be two `fatal: not a git repository` errors emitted even though the build succeeds. This is because of how `VERSION` and `COMMIT` are set in the Makefile. This PR suppresses the stderr for these variable assignments.
* Makefile: do not force `CGO_ENABLED=0` for `miniflux` target
* Add GitHub Action pipeline to build packages on-demand
* Remove Golint (deprecated), use `staticcheck` and `golangci-lint` instead
* Build amd64/arm64 Debian packages with CGO disabled
* Update `go.mod` and add `.exe` suffix to Windows binary
* Add a couple of fuzzers
* Fix CodeQL workflow
* Code and performance improvements:
* Use an `io.ReadSeeker` instead of an `io.Reader` to parse feeds
* Speed up the sanitizer:
- Allow Youtube URLs to start with `www`
- Use `strings.Builder` instead of a `bytes.Buffer`
- Use a `strings.NewReader` instead of a `bytes.NewBufferString`
- Sprinkles a couple of `continue` to make the code-flow more obvious
- Inline calls to `inList`, and put their parameters in the right order
- Simplify `isPixelTracker`
- Simplify `isValidIframeSource`, by extracting the hostname and comparing it directly, instead of using the full url and checking if it starts with multiple variations of the same one (`//`, `http:`, `https://` multiplied by `/www.`)
- Add a benchmark
- Instead of having to allocate a ~100 keys map containing possibly dynamic values (at least to the go compiler), allocate it once in a global variable. This significantly speeds things up, by reducing the garbage collector/allocator involvements.
- Use constant time access for maps instead of iterating on them
- Build a ~large whitelist map inline instead of constructing it item by item (and remove a duplicate key/value pair)
- Use `slices` instead of hand-rolled loops
* Reuse a `Reader` instead of copying to a buffer when parsing an Atom feed
* Preallocate memory when exporting to OPML: This should marginally increase performance when exporting a large amount of feeds to OPML
* Delay call of `view.New` after logging the user in: There is no need to do extra work like creating a session and its associated view until the user has been properly identified and as many possibly-failing sql request have been successfully run
* Use constant-time comparison for anti-csrf tokens: This is probably completely overkill, but since anti-csrf tokens are secrets, they should be compared against untrusted inputs in constant time
* Simplify and optimize `genericProxyRewriter`
- Reduce the number of nested loops: it's preferable to search the whole page once and filter on it (even with filters that should always be false), rather than to search it again for every element we're looking for.
- Factorize the proxying conditions into a `shouldProxy` function to reduce the copy-pasta.
* Speed up `removeUnlikelyCandidates`: `.Not` returns a brand new `Selection`, copied element by element
* Improve `EstimateReadingTime`'s speed by a factor 7
- Refactorise the tests and add some
- Use 250 signs instead of the whole text
- Only check for Korean, Chinese and Japanese script
- Add a benchmark
- Use a more idiomatic control flow
* Don't compute reading-time when unused: If the user doesn't display reading times, there is no need to compute them. This should speed things up a bit, since `whatlanggo.Detect` is abysmally slow.
* Simplify `username` generation for the integration tests: No need to generate random numbers 10 times, generate a single big-enough one. A single int64 should be more than enough
* Add missing regex anchor detected by CodeQL
* Don't mix up slices capacity and length
* Use prepared statements for intervals, `ArchiveEntries` and `updateEnclosures`
* Use modern for-loops introduced with Go 1.22
* Remove a superfluous condition: No need to check if the length of `line` is positive since we're checking afterwards that it contains the `=` sign
* Close resources as soon as possible, instead of using `defer()` in a loop
* Remove superfluous escaping in a regex
* Use `strings.ReplaceAll` instead of `strings.Replace(…, -1)`
* Use `strings.EqualFold` instead of `strings.ToLower(…) ==`
* Use `.WriteString(` instead of `.Write([]byte(…`
* Use `%q` instead of `"%s"`
* Make `internal/worker/worker.go` read-only
* Use a switch-case construct in `internal/locale/plural.go` instead of an avalanche of `if`
* Template functions: simplify `formatFileSize` and `duration` implementation
* Inline some templating functions
* Make use of `printer.Print` when possible
* Add a `printer.Print` to `internal/locale/printer.go`: No need to use variadic functions with string format interpolation to generate static strings
* Minor code simplification in `internal/ui/view/view.go`: No need to create the map item by item when we can create it in one go
* Build the map inline in `CountAllFeeds()`: No need to build an empty map to then add more fields in it one by one
* Miscellaneous improvements to `internal/reader/subscription/finder.go`:
- Surface `localizedError` in `FindSubscriptionsFromWellKnownURLs` via `slog`
- Use an inline declaration for new subscriptions, like done elsewhere in the file, if only for consistency's sake
- Preallocate the `subscriptions` slice when using an RSS-bridge,
* Use an update-where for `MarkCategoryAsRead` instead of a subquery
* Simplify `CleanOldUserSessions`' query: No need for a subquery, filtering on `created_at` directly is enough
* Simplify `cleanupEntries`' query
- `NOT (hash=ANY($4))` can be expressed as `hash NOT IN $4`
- There is no need for a subquery operating on the same table, moving the conditions out is equivalent.
* Reformat `ArchiveEntries`'s query for consistency's sake and replace the `=ANY` with an `IN`
* Reformat the query in `GetEntryIDs` and `GetReadTime`'s query for consistency's sake
* Simplify `WeeklyFeedEntryCount`: No need for a `BETWEEN`: we want to filter on entries published in the last week, no need to express is as "entries published between now and last week", "entries published after last week" is enough
* Add some tests for `add_image_title`
* Remove the `github.com/google/uuid` dependency: replace it with a hand-rolled implementation. Heck, a UUID isn't even a requirement according to Omnivore API docs
* Simplify `internal/reader/icon/finder.go`:
- Use a simple regex to parse data uri instead of a hand-rolled parser, and document what fields are considered mandatory.
- Use case-insensitive matching to find (fav)icons, instead of doing the same query twice with different letter cases
- Add `apple-touch-icon-precomposed.png` as a fallback `favicon`
- Reorder the queries to have `icon` first, since it seems to be the most popular one. It used to be last, meaning that pages had to be parsed completely 4 times, instead of one now.
- Minor factorisation in `findIconURLsFromHTMLDocument`
* Small refactoring of `internal/reader/date/parser.go`:
- Split dates formats into those that require local times and those who don't, so that there is no need to have a switch-case in the for loop with around 250 iterations at most.
- Be more strict when it comes to timezones, previously invalid ones like -13 were accepted. Also add a test for this.
- Bail out early if the date is an empty string.
* Make use of Go ≥ 1.21 slices package instead of hand-rolled loops
* Reorder the fields of the `Entry` struct to save some memory
* Dependencies update:
* Bump `golang.org/x/oauth2` from `0.17.0` to `0.18.0`
* Bump `github.com/prometheus/client_golang` from `1.18.0` to `1.19.0`
* Bump `github.com/tdewolff/minify/v2` from `2.20.16` to `2.20.18`
* Bump `github.com/PuerkitoBio/goquery` from `1.8.1` to `1.9.1`
* Bump `golang.org/x/crypto` from `0.19.0` to `0.20.0`
* Bump `github.com/go-jose/go-jose/v3` from `3.0.1` to `3.0.3`
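
For the anti-CSRF entry above, `crypto/subtle` is the standard way to compare two secrets without leaking timing information. A minimal sketch of the technique, not Miniflux's exact helper:

```go
package main

import (
	"crypto/subtle"
	"fmt"
)

// validCSRFToken compares the expected token with untrusted input in
// constant time, so equality checks do not short-circuit on the first
// differing byte.
func validCSRFToken(expected, provided string) bool {
	return subtle.ConstantTimeCompare([]byte(expected), []byte(provided)) == 1
}

func main() {
	fmt.Println(validCSRFToken("s3cr3t-token", "s3cr3t-token")) // true
	fmt.Println(validCSRFToken("s3cr3t-token", "guess"))        // false
}
```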
Version 2.1.0 (February 17, 2024)
---------------------------------
* Add Linkwarden integration
* Add LinkAce integration
* Add `FORCE_REFRESH_INTERVAL` config option
* Add `item-meta-info-reading-time` CSS class
* Add `add_dynamic_iframe` rewrite function
* Add attribute `data-original-mos` to `add_dynamic_image` rewrite candidates
* Update entry processor to allow blocking/keeping entries by tags and/or authors
* Change default `Accept` header when fetching feeds
* Rewrite relative RSS Bridge URL to absolute URL
* Use numeric user ID in Alpine and distroless container image (avoid `securityContext` error in Kubernetes)
* Always try to use HTTP/2 when fetching feeds if available
* Add `type` attribute in OPML export as per OPML 2.0 specs
* Fix missing translation argument for the key `error.unable_to_parse_feed`
* Fix Debian package builder when using Go 1.22 and `armhf` architecture
* Fix typo in log message
* Fix incorrect label shown when saving an article
* Fix incorrect condition in refresh feeds cli
* Fix incorrect label `for` attribute
* Add missing label ID for custom CSS field
* Accessibility improvements:
* Add workaround for macOS VoiceOver that didn't announce `details` and `summary` when expanded
* Add `alert` role to alert message element
* Add a `h2` heading to the article element so that the screen reader users can navigate the article through the heading level
* Add an `aria-label` attribute for the article element for screen readers
* Remove the icon image `alt` attribute in feeds list to prevent screen reader to announce it before entry title
* Add `sr-only` CSS class for screen reader users (provides more context)
* Differentiate between buttons and links
* Change links that could perform actions to buttons
* Improve translation of hidden Aria elements
* Remove the redundant article role
* Add a search landmark for the search form so that the screen reader users can navigate to it
* Add skip to content link
* Add `nav` landmark to page header links
* Limit feed/category entry pagination to unread entries when coming from unread entry list
* Update German translation
* Update GitHub Actions to Go 1.22
* Bump `golang.org/x/term` from `0.16.0` to `0.17.0`
* Bump `github.com/google/uuid` from `1.5.0` to `1.6.0`
* Bump `github.com/yuin/goldmark` from `1.6.0` to `1.7.0`
* Bump `golang.org/x/oauth2` from `0.15.0` to `0.17.0`
* Bump `github.com/tdewolff/minify/v2` from `2.20.10` to `2.20.12`
* Bump `golang.org/x/term` from `0.15.0` to `0.16.0`
* Bump `github.com/prometheus/client_golang` from `1.17.0` to `1.18.0`
* Bump `github.com/tdewolff/minify/v2` from `2.20.9` to `2.20.16`
* Bump `golang.org/x/crypto` from `0.16.0` to `0.19.0`
* Bump `github.com/go-webauthn/webauthn` from `0.9.4` to `0.10.1`
* Bump `golang.org/x/net` from `0.20.0` to `0.21.0`
Version 2.0.51 (December 13, 2023)
----------------------------------
* Add Omnivore integration
* Fixes for the regressions introduced in version 2.0.50:
* Ensure all HTML documents are encoded in UTF-8
* Send default User-Agent and HTTP caching headers when making HTTP requests
* Allow Youtube links to be opened outside the `iframe` (avoid `ERR_BLOCKED_BY_RESPONSE` error)
* Fix inaccessible metrics endpoint when listening on Unix socket
* Allow renaming and moving feed at the same time in the Google Reader API
* Log `nb_jobs` only when number of jobs is larger than 0 in background scheduler
* Deduplicate feed URLs when parsing HTML document during discovery process
* Calculate a virtual weekly count based on the average updating frequency (`POLLING_SCHEDULER=entry_frequency`)
* Update GitHub Actions workflow to be able to run the linter and tests on-demand
* Add `SCHEDULER_ROUND_ROBIN_MIN_INTERVAL` config option
* Add links to GitHub for the commit hash and the version in the about page
* Use "starred" rather than "bookmarked" in English translation
* Update Chinese (CN & TW) translation
* Bump `github.com/google/uuid` from `1.4.0` to `1.5.0`
* Bump `github.com/coreos/go-oidc/v3` from `3.7.0` to `3.9.0`
* Bump `github.com/tdewolff/minify/v2` from `2.20.6` to `2.20.9`
* Bump `github.com/go-webauthn/webauthn` from `0.8.6` to `0.9.4`
* Bump `golang.org/x/oauth2` from `0.14.0` to `0.15.0`
Version 2.0.50 (November 12, 2023)
----------------------------------
* Add WebAuthn / Passkey integration
* Add RSS-Bridge integration
* Take RSS TTL field into consideration to schedule next check date
* Show number of visible entries instead of number of read entries in feed list
* OpenID Connect: Redirect to configured user home page after successful authentication
* Google Reader API fixes:
* `user/{userID}/state/com.google/read` is missing in categories section for read entries
* Take `ExcludeTargets` into consideration in feed stream handler
* Allow iframes pointing to Twitch videos
* Filter feed entries based on URL or title
* Take into consideration `hide_globally` property defined for categories in `/v1/entries` API endpoint
* Add category ID to webhooks request body
* Update date parser to parse more invalid date formats
* Refactor feed discovery handler, and avoid an extra HTTP request if the URL provided is the feed
* Refactor HTTP Client and `LocalizedError` packages
* Refactor Batch Builder, and prevent accidental and excessive refreshes from the web UI
* Refactor icon finder:
- Continue the discovery process when the feed icon is invalid
- Search all icons from the HTML document and do not stop on the first one
* Add support for SVG icons with data URL without encoding
* Expose `next_check_at` in the web ui and API
* Add database indexes to improve performance
* Change log level to warning for failed feeds refresh in cronjob
* Do not log website without icon as warning
* Add GitHub workflow to build binaries
* Add GitHub extensions to devcontainer
* Make sure to pull the latest base image when building the Docker image
* Strip version prefix when building Debian package
* Add `github-cli` and `docker-outside-of-docker` features to devcontainer
* Bump `golang.org/x/*` dependencies
* Bump `github.com/gorilla/mux` from `1.8.0` to `1.8.1`
* Bump `github.com/tdewolff/minify/v2` from `2.19.9` to `2.20.6`
* Bump `github.com/yuin/goldmark` from `1.5.6` to `1.6.0`
* Bump `github.com/coreos/go-oidc/v3` from `3.6.0` to `3.7.0`
Version 2.0.49 (October 15, 2023)
---------------------------------
* Implement structured logging using `log/slog` package. New config options available:
* `LOG_FORMAT`: `json` or `text`
* `LOG_LEVEL`: `debug`, `info`, `warning`, or `error`
* `LOG_FILE`: `stderr`, `stdout`, or a file path
* The `DEBUG` option is now deprecated in favor of `LOG_LEVEL`
* API Improvements:
* Add endpoint `/v1/version`
* Add endpoint `PUT /v1/entries` to update entry title and content
* Add endpoint `/v1/icons/{iconID}`
* Add endpoint `/v1/flush-history` to flush history
* Make the category optional when creating feeds for API clients who don't support categories
* Add enclosures to `GET /v1/entries` endpoint
* Add `published_after`, `published_before`, `changed_after` and `changed_before` options to `/v1/entries` endpoint
* Telegram integration improvements:
* Replace feed HTML link with a button to avoid page preview issues
* Add the possibility to disable buttons
* Add Bruno Miniflux API collection in `contrib` folder (Bruno is an open source alternative to Postman/Insomnia)
* Add command line argument to export user feeds as OPML
* Add new rewrite rules `add_hn_links_using_hack` and `add_hn_links_using_opener` to open HN comments with iOS apps
* Fix timestamp format for `Expires` response header
* Fix Javascript error when reading time option is disabled
* Fix Apprise logic to handle feed service URLs
* Fix missing word in force refresh message
* Remove deprecated `PreferServerCipherSuites` TLS option
* Replace `github.com/rylans/getlang` with `github.com/abadojack/whatlanggo` because `getlang` doesn't seem to be updated anymore
* Bump `github.com/mccutchen/go-httpbin/v2` from `2.11.0` to `2.11.1`
* Bump `golang.org/x/*` dependencies
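
A minimal illustrative sketch (not Miniflux's actual code) of how the `LOG_FORMAT` and `LOG_LEVEL` options described above can be mapped onto the standard `log/slog` package; the environment-variable parsing shown here is an assumption made for illustration only:

    // Sketch only: map LOG_FORMAT / LOG_LEVEL onto log/slog handlers.
    package main

    import (
        "log/slog"
        "os"
    )

    func main() {
        level := slog.LevelInfo
        switch os.Getenv("LOG_LEVEL") { // assumed values: debug, info, warning, error
        case "debug":
            level = slog.LevelDebug
        case "warning":
            level = slog.LevelWarn
        case "error":
            level = slog.LevelError
        }

        opts := &slog.HandlerOptions{Level: level}
        var handler slog.Handler = slog.NewTextHandler(os.Stderr, opts)
        if os.Getenv("LOG_FORMAT") == "json" { // assumed values: json or text
            handler = slog.NewJSONHandler(os.Stderr, opts)
        }

        slog.SetDefault(slog.New(handler))
        slog.Info("logger configured", slog.String("level", level.String()))
    }
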
Version 2.0.48 (September 15, 2023)
-----------------------------------
* Add generic webhook integration (see the signature verification sketch at the end of this section)
* Send webhook events when new entries are detected
* Send webhook events when saving an entry
* Sign the outgoing requests with HMAC-SHA256
* Improve Telegram integration
* Add built-in Telegram client
* Remove dependency on `go-telegram-bot-api` library
* Add new options:
* Optional topic ID
* Disable page preview
* Disable notifications
* Add new button to go to article
* Improve Matrix integration
* Add built-in Matrix client
* Remove dependency on `gomatrix` library
* Send HTML formatted messages to Matrix
* OpenID Connect authentication improvements:
* Add OAuth2 PKCE support
* Add `profile` scope to OIDC integration to support accounts without email address
* Prevent empty username when using the OIDC integration
* Add `factor` for `entry_frequency` scheduler:
* Allow the user to increase the frequency of the `entry_frequency`
scheduler by a configurable factor in order to shorten the time between
updates.
* Fix: status bar is unreadable when using PWA in dark mode on Firefox Android
* Group form fields into fieldsets to improve page layout
* Update Russian translation
* Make sure icon URLs are always absolute
* Add Apprise service URLs per feed
* Trim `username` and `password` form fields
* Strip HTML tags from DublinCore Creator tags
* Fix scroll up behavior on Firefox Android
* Add missing `return` statement in `fetchContent` UI handler
* Add `replace_title` rewrite rule to adjust entry titles
* Fix Pocket integration redirect URL and Google Reader API HREF
* Fix feed `hide_globally` property so it can be used with third-party clients
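
For the webhook integration above, outgoing requests are signed with HMAC-SHA256; below is a minimal sketch of how a receiving endpoint could verify such a signature. The `X-Miniflux-Signature` header name and the hex encoding of the digest are assumptions for illustration, so check the documentation of your Miniflux version for the exact contract.

    // Sketch only: verify a hex-encoded HMAC-SHA256 signature over the raw body.
    package main

    import (
        "crypto/hmac"
        "crypto/sha256"
        "encoding/hex"
        "io"
        "log"
        "net/http"
    )

    const webhookSecret = "change-me" // shared secret configured in Miniflux

    func verify(body []byte, signature string) bool {
        mac := hmac.New(sha256.New, []byte(webhookSecret))
        mac.Write(body)
        expected := hex.EncodeToString(mac.Sum(nil))
        return hmac.Equal([]byte(expected), []byte(signature))
    }

    func main() {
        http.HandleFunc("/webhook", func(w http.ResponseWriter, r *http.Request) {
            body, err := io.ReadAll(r.Body)
            if err != nil || !verify(body, r.Header.Get("X-Miniflux-Signature")) {
                http.Error(w, "invalid signature", http.StatusForbidden)
                return
            }
            w.WriteHeader(http.StatusNoContent)
        })
        log.Fatal(http.ListenAndServe(":8000", nil))
    }
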
Version 2.0.47 (August 20, 2023)
--------------------------------
* Update rules for `webtoons.com`
* Use HTTP client from the standard library for third-party integrations
* Rename internal `url` package to `urllib` to avoid overlap with `net/url`
* Add Shaarli integration
* Add Shiori integration
* Add Apprise integration
* Add Readwise Reader integration
* Consider base path when generating third-party services API endpoint
* Use podcast duration tag as reading time
* Move internal packages to an `internal` folder
* Rename Miniflux package name to follow Go module naming convention
* Update RockyLinux image from 8 to 9 (used to build RPM package)
* Add force refresh in feed edit and feed entries page
* Use Odysee video duration as read time
* Upgrade to Go 1.21
* Use details disclosure element to show the list of third-party services
* Use Web Share API for sharing entry
* Add a workaround for parsing some invalid date format
* Add Thunder Client API collection into contrib folder
* Add new API endpoint: `/entries/{entryID}/save`
* Trigger Docker and packages workflows only for semantic tags
* Go module versioning expects Git tags to start with the letter `v`.
* The goal is to keep the existing naming convention for generated artifacts and
have proper versioning for the Go module.
* Bump `golang.org/x/*` dependencies
* Bump `github.com/yuin/goldmark`
* Bump `github.com/tdewolff/minify/v2`
* Bump `github.com/mccutchen/go-httpbin/v2`
Version 2.0.46 (July 21, 2023)
------------------------------
* Add scraper and rewrite rules for Webtoons
* Fix regression in integration page and simplify SQL query
* Wallabag integration: add more information in log messages
* Add support for custom Youtube embed URL
* Fix accessibility issues in modal component
* Fix modal aria role
* Trap focusing with tab / shift+tab inside the modal
* Restore keyboard focus when closing modal
* Automatically move keyboard focus to first focusable element unless specified otherwise
* Keyboard shortcut help modal: move keyboard focus to modal title
* Keyboard shortcut help modal: change close control from link to button
* Add Notion integration
* Update `golang.org/x/*` dependencies and `go-oidc` to v3.6.0
* Improve responsive design
* Add user setting for marking entry as read on view
* Improve Russian translation
* Add the possibility to run cleanup tasks from the command line
* Add the possibility to run Miniflux as a cronjob
* Use `go-httpbin` to run tests locally and avoid remote calls to `httpbin.org`
* Display tags when viewing entries
* Update categories API endpoint to return `total_unread` and `feed_count`
* Improve date parser to handle various broken date formats
* Avoid `pq: time zone displacement out of range` errors
* Improve entry existence check to make better use of index
* Add unique index `enclosures_user_entry_url_idx`
* Add mark as unread for Linkding integration
* Add sub-folder support for Wallabag integration
* Use RockyLinux to build RPM package
* Disable CGO when building RPM package
* Disable CGO when building Docker images
Version 2.0.45 (June 21, 2023)
------------------------------
* Add media player to listen to audio and video podcasts, with the possibility to resume from the last playback position
* Add default tag names for Linkding integration
* Mark only globally visible entries when marking all entries from UI
* Use image included in feed as feed icon when available
* Order history by `changed_at` and `published_at`
* Remove title attribute from entry title links
* Fix reading time that is not aligned correctly with the latest version of Safari
* Use glyphs of the same size on keyboard shortcuts page
* Add maskable versions of the PWA icon
* Replace copyright header with SPDX identifier
* Remove the "í" letter from the Portuguese "lido" word
* Increase golangci-lint timeout value
* Bump `github.com/tdewolff/minify/v2`, `github.com/prometheus/client_golang`, `golang.org/x/*` dependencies
Version 2.0.44 (May 6, 2023)
----------------------------
* Add link to the URL rewrite rules documentation
* Update scraping rules for `ilpost.it`
* Update rewrite rules for `theverge.com`
* Add a rewrite rule to remove clickbait titles
* Make sure `PROXY_IMAGES` option is backward compatible with `PROXY_OPTION` and `PROXY_MEDIA_TYPES`
* Add new rule to remove tables
* Add support for searching well-known URLs in subdirectory
* Add CSS `word-wrap` rule to break very long entry title into multiple lines
* Add swipe as option for gesture navigation between entries. There are now 3 possible choices: `none`, `double-tap`, and `swipe`.
* Prefer typographic punctuation in English translation
* Process older entries first (see the sketch at the end of this section):
- Feed entries are usually ordered from most to least recent.
- Processing older entries first ensures that their creation timestamp
is lower than that of newer entries.
- This is useful when we order by creation, because then we get a
consistent timeline.
* Fix Grafana dashboard
* Push Docker images to `Quay.io` (RedHat)
* Bump `golang.org/x/*`, `github.com/lib/pq`, `mvdan.cc/xurls/v2` and `github.com/prometheus/client_golang` dependencies
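
A short sketch of the "process older entries first" idea above, using a hypothetical `Entry` type (not Miniflux's model): sorting parsed entries by published date ascending before storing them keeps creation timestamps consistent with the feed's chronology.

    // Sketch only: insert the oldest entries first so creation timestamps
    // produce a consistent timeline when ordering by creation date.
    package main

    import (
        "fmt"
        "sort"
        "time"
    )

    type Entry struct {
        Title     string
        Published time.Time
    }

    func processOldestFirst(entries []Entry) {
        sort.Slice(entries, func(i, j int) bool {
            return entries[i].Published.Before(entries[j].Published)
        })
        for _, e := range entries {
            fmt.Println("creating entry:", e.Title) // creation time increases monotonically
        }
    }

    func main() {
        processOldestFirst([]Entry{
            {Title: "newest", Published: time.Now()},
            {Title: "oldest", Published: time.Now().Add(-48 * time.Hour)},
        })
    }
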
Version 2.0.43 (March 16, 2023)
-------------------------------
* Avoid XSS when opening a broken image due to unescaped ServerError in proxy handler (CVE-2023-27592)
By creating an RSS feed item with the inline description containing an `<img>` tag
with a `srcset` attribute pointing to an invalid URL like
`http:a`, we can coerce the proxy handler into an error
condition where the invalid URL is returned unescaped and in full.
This results in JavaScript execution on the Miniflux instance as soon as the
user is convinced to open the broken image.
* Use `r.RemoteAddr` to check `/metrics` endpoint network access (CVE-2023-27591; see the sketch at the end of this section)
HTTP headers like `X-Forwarded-For` or `X-Real-Ip` can be easily spoofed. As
such, it cannot be used to test if the client IP is allowed.
The recommendation is to use HTTP Basic authentication to protect the
metrics endpoint, or run Miniflux behind a trusted reverse-proxy.
* Add HTTP Basic authentication for `/metrics` endpoint
* Add proxy support for several media types
* Parse feed categories from RSS, Atom and JSON feeds
* Ignore empty link when discovering feeds
* Disable CGO explicitly to make sure the binary is statically linked
* Add CSS classes to differentiate between category/feed/entry view and icons
* Add rewrite and scraper rules for `blog.cloudflare.com`
* Add `color-scheme` to themes
* Add new keyboard shortcut to toggle open/close entry attachments section
* Sanitizer: allow `id` attribute in `` element
* Add Indonesian Language
* Update translations
* Update Docker Compose examples:
- Run the application in one command
- Bring back the health check condition to `depends_on`
- Remove deprecated `version` element
* Update scraping rules for `ilpost.it`
* Bump `github.com/PuerkitoBio/goquery` from `1.8.0` to `1.8.1`
* Bump `github.com/tdewolff/minify/v2` from `2.12.4` to `2.12.5`
* Bump `github.com/yuin/goldmark` from `1.5.3` to `1.5.4`
* Bump `golang.org/x/*` dependencies
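
Regarding the `/metrics` access check above (CVE-2023-27591): proxy headers such as `X-Forwarded-For` or `X-Real-Ip` are client-controlled, so the check has to rely on the TCP peer address. The following is a generic, minimal sketch of that pattern; the allowed network and the wiring are illustrative assumptions, not Miniflux's actual implementation.

    // Sketch only: restrict an endpoint to an allowed network using the TCP
    // peer address (r.RemoteAddr), never spoofable proxy headers.
    package main

    import (
        "log"
        "net"
        "net/http"
        "net/netip"
    )

    var allowedNetwork = netip.MustParsePrefix("127.0.0.0/8") // illustrative allowlist

    func restrictToAllowedNetwork(next http.Handler) http.Handler {
        return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            host, _, err := net.SplitHostPort(r.RemoteAddr)
            if err != nil {
                http.Error(w, "forbidden", http.StatusForbidden)
                return
            }
            addr, err := netip.ParseAddr(host)
            if err != nil || !allowedNetwork.Contains(addr) {
                http.Error(w, "forbidden", http.StatusForbidden)
                return
            }
            next.ServeHTTP(w, r)
        })
    }

    func main() {
        metrics := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            w.Write([]byte("metrics\n"))
        })
        http.Handle("/metrics", restrictToAllowedNetwork(metrics))
        log.Fatal(http.ListenAndServe(":8080", nil))
    }
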
Version 2.0.42 (January 29, 2023)
---------------------------------
* Fix header items wrapping
* Add option to enable or disable double tap
* Improve PWA display mode label in settings page
* Bump `golang.org/x/*` dependencies
* Update translations
* Add scraping rule for `ilpost.it`
* Update reading time HTML element after fetching the original web page
* Add category feeds refresh feature
Version 2.0.41 (December 10, 2022)
----------------------------------
* Reverted PR #1290 (follow the only link) because it leads to several panics/segfaults that prevent feed updates
* Disable double-tap mobile gesture if swipe gesture is disabled
* Skip integrations if there are no entries to push
* Enable TLS-ALPN-01 challenge for ACME
- This type of challenge works purely at the TLS layer and is compatible
with SNI proxies. The existing HTTP-01 challenge support has been left
as-is.
* Preconfigure Miniflux for GitHub Codespaces
* Updated `golang.org/x/net/*` dependencies
Version 2.0.40 (November 13, 2022)
----------------------------------
* Update dependencies
* Pin Postgres image version in Docker Compose examples to avoid unexpected upgrades
* Make English and Spanish translation more consistent:
- Use "Feed" everywhere instead of "Subscription"
- Use "Entry" instead of "Article"
* Allow Content-Type and Accept headers in CORS policy
* Use dirs file for Debian package
* Use custom home page in PWA manifest
* Fix scraper rule that could be incorrect when there is a redirect
* Improve web scraper to fetch the only link present, as a workaround for some landing pages
* Add Matrix bot integration
* Proxify images in API responses
* Add new options in user preferences to configure sorting of entries in the category page
* Remove dependency on `github.com/mitchellh/go-server-timing`
* Add support for the `continuation` parameter and result for Google Reader API ID calls
* Use automatic variable for build target file names
* Add rewrite rule for `recalbox.com`
* Improve Dutch translation
Version 2.0.39 (October 16, 2022)
---------------------------------
* Add support for date filtering in Google Reader API item ID calls
* Handle RSS entries with only a GUID permalink
* Go API Client: Accept endpoint URLs ending with `/v1/`
* CORS API headers: Allow `Basic` authorization header
* Log feed URL when submitting a subscription that returns an error
* Update `make run` command to execute migrations automatically
* Add option to send only the URL to Wallabag
* Do not convert anchors to absolute links
* Add config option to use a custom image proxy URL
* Allow zoom on mobile devices
* Add scraping rules for `theverge.com`, `royalroad.com`, `swordscomic.com`, and `smbc-comics.com`
* Add Ukrainian translation
* Update `golang.org/x/*` dependencies
* Bump `github.com/tdewolff/minify/v2` from `2.12.0` to `2.12.4`
* Bump `github.com/yuin/goldmark` from `1.4.13` to `1.5.2`
* Bump `github.com/lib/pq` from `1.10.6` to `1.10.7`
Version 2.0.38 (August 13, 2022)
--------------------------------
* Rename default branch from master to main
* Update GitHub Actions
* Bump `github.com/prometheus/client_golang` from `1.12.2` to `1.13.0`
* Fix some linter issues
* Handle Atom links with a text/html type defined
* Add `parse_markdown` rewrite function
* Build RPM and Debian packages automatically using GitHub Actions
* Add `explosm.net` scraper rule
* Make default home page configurable
* Add title attribute to entry links because text could be truncated
* Highlight categories with unread entries
* Allow option to order by title and author in API entry endpoint
* Update Russian translation
* Make reading speed user-configurable
* Added translation for Hindi language used in India
* Add rewrite rules for article URL before fetching content
* Bump `github.com/tdewolff/minify/v2` from `2.11.7` to `2.12.0`
* Support other repo owners in GitHub Docker Action
* Proxifying an empty URL should not crash
* Avoid stretched image if specified width is larger than Miniflux's layout
* Add support for OPML files with several nested outlines
* sanitizer: handle image URLs in `srcset` attribute with comma
* Allow `width` and `height` attributes for `img` tags
* Document that `-config-dump` command line argument shows sensitive info
* Add System-V init service in contrib folder
* Fix syntax error in `RequestBuilder.getCsrfToken()` method
Version 2.0.37 (May 27, 2022)
-----------------------------
* Add rewrite rule to decode base64 content
* Add Linkding integration
* Add comment button to Telegram message
* Add API endpoint to fetch unread and read counters
* Fix logic bug in Google Reader API sanity check
* Reduce the number of CORS preflight checks to save network bandwidth
* Add Espial integration
* Allow API search for entries which are not starred
* Try to use outermost element text when title is empty
* Make swipe gestures feel more natural
- Removes opacity transition when swiping an article read/unread
- Adds "resistance" to the swiped entry when the 75px threshold is
reached
- Fixes an issue in which a swiped article couldn't be moved <15px
* Add support for feed streams to Google Reader API IDs API
* Fix invalid parsing of icon data URL
* Add Traditional Chinese translation
* Add distroless Docker image variant
* Add Go 1.18 to GitHub Action
* Bump `github.com/tdewolff/minify/v2` from `2.10.0` to `2.11`
* Bump `github.com/prometheus/client_golang` from `1.12.1` to `1.12.2`
* Bump `github.com/lib/pq` from `1.10.4` to `1.10.6`
Version 2.0.36 (March 8, 2022)
------------------------------
* Gray out pagination buttons when they are not applicable
* Use truncated entry description as title if unavailable
* Do not fallback to InnerXML if XHTML title is empty
* Add `+` keyboard shortcut for new subscription page
* Add `(+)` action next to Feeds to quickly add new feeds
* Fix unstar not working via Google Reader API
* Remove circles in front of page header list items
* Fix CSS hover style for links styled as buttons
* Avoid showing `undefined` when clicking on read/unread
* Add new keyboard shortcut `M` to toggle read/unread, and go to previous item
* Add several icons to menus according to their roles
* Add missing event argument to `onClick()` function call
* Add links to scraper/rewrite/filtering docs when editing feeds
* Add a rewrite rule for Castopod episodes
* Fix regression: reset touch-item if not in `/unread` page
* Add API endpoint to fetch original article
* Show the category first in feed settings
* Add pagination on top of all entries
* Display Go version in "About" page
* Bump `mvdan.cc/xurls/v2` from 2.3.0 to 2.4.0
* Bump `github.com/prometheus/client_golang` from 1.11.0 to 1.12.1
* Bump `github.com/tdewolff/minify/v2` from 2.9.28 to 2.10.0
Version 2.0.35 (January 21, 2022)
---------------------------------
* Set `read-all` permission to `GITHUB_TOKEN` for GitHub Actions
* Pin `jshint` version in linter job
* Fix incorrect conversion between integer types
* Add new GitHub Actions workflows: CodeQL and Scorecards analysis
* Handle Atom feeds with space around CDATA
* Bump `github.com/tdewolff/minify/v2` from 2.9.22 to 2.9.28
* Add Documentation directive to Systemd service
* Do not reset `touch-item` if successfully swiped
* Add support for multiple authors in Atom feeds
* Omit `User-Agent` header in image proxy to avoid being blocked
* Use custom feed user agent to fetch website icon
* Make default Invidious instance configurable
* Add new rewrite rule `add_youtube_video_from_id` to add Youtube videos in Quanta articles
* Add scrape and rewrite rules for `quantamagazine.org`
* Expose entry unshare link in the entry and list views
* Add Google Reader API implementation (experimental)
* Add `Content-Security-Policy` header to feed icon and image proxy endpoints (see the sketch at the end of this section)
- SVG images could contain Javascript. This CSP blocks inline script.
- Feed icons are served using `<img>` tags, so Javascript is not interpreted.
* Add Finnish translation
* Add scraper rule for `ikiwiki.iki.fi`
* Remove `SystemCallFilter` from `miniflux.service`
* Fix minor typo in French translation
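
For the `Content-Security-Policy` item above, here is a minimal generic sketch of attaching a restrictive CSP to an image-serving handler so that script embedded in an SVG is not executed when the file is opened directly. The exact policy value used by Miniflux may differ; `default-src 'none'` is an illustrative assumption.

    // Sketch only: serve an image with a restrictive CSP so inline scripts
    // inside SVG files are blocked when the resource is opened directly.
    package main

    import (
        "log"
        "net/http"
    )

    func serveIcon(w http.ResponseWriter, r *http.Request) {
        w.Header().Set("Content-Security-Policy", "default-src 'none'") // illustrative policy
        w.Header().Set("Content-Type", "image/svg+xml")
        w.Write([]byte(`<svg xmlns="http://www.w3.org/2000/svg"></svg>`))
    }

    func main() {
        http.HandleFunc("/feed/icon", serveIcon)
        log.Fatal(http.ListenAndServe(":8080", nil))
    }
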
Version 2.0.34 (December 16, 2021)
----------------------------------
* Add rewrite rule for comics website http://monkeyuser.com
* Add `` tag to OPML export
* Tighten Systemd sandboxing and update comments in `miniflux.service`
* Add `RuntimeDirectory` to Systemd service
* Order disabled feeds at the end of the list
* Add support for theme color based on preferred color scheme of OS
* Bump `github.com/lib/pq` from 1.10.3 to 1.10.4
* Bump `github.com/PuerkitoBio/goquery` from 1.7.1 to 1.8.0
* Fix typos in `model/icon.go`
* Add `data-srcset` support to the `add_dynamic_image` rewrite rule
* Fix Docker Compose example files compatibility to v3
* Added the `role="article"` to `` elements for better accessibility with screen readers
* Redact secrets shown on the about page
* Handle `srcset` images with no space after comma
* Hide the logout link when using auth proxy
* Fix wrong CSS variable
* Change `-config-dump` command to use `KEY=VALUE` format
Version 2.0.33 (September 25, 2021)
-----------------------------------
* Build RPM and Debian package with PIE mode enabled
* Add CSS rule to hide `` tag in old browsers
* Bump `github.com/tdewolff/minify/v2` from `2.9.21` to `2.9.22`
* Bump `github.com/lib/pq` from `1.10.2` to `1.10.3`
* Remove `RequestURI()` hack
* Improve `zh_CN` translation
* Add ability to change entry sort order in the UI
* Add minor improvements in integration package
* Add Telegram integration
* Add rewrite rule to remove DOM elements
* Add proxy argument to `scraper.Fetch()`
* Add mime type `application/feed+json` to discover JSON Feed v1.1
* Update scraper rule for `theregister.com`
* Add Go 1.17 to GitHub Actions
* Display option to hide feed only when category is not already hidden
* Add option to hide feeds from the global Unread list
Version 2.0.32 (August 14, 2021)
--------------------------------
* Bump `github.com/tdewolff/minify/v2` from 2.9.17 to 2.9.21
* Bump `mvdan.cc/xurls/v2` from 2.2.0 to 2.3.0
* Bump `github.com/PuerkitoBio/goquery` from 1.6.1 to 1.7.1
* Bump `github.com/prometheus/client_golang` from 1.10.0 to 1.11.0
* Add `/rss/` to the list of well known URLs during feed discovery
* Use `authors` entry for JSON 1.1 feeds
* Added Greek translation
* Added the ability to mark an entire category as read in the web ui
* Added "in" in "logged in" for en_US `tooltip.logged_user`
* Added option to hide categories from the global unread list
* Show "saving" labels for entry status button
* Golang client: Try to parse response body on `InternalServerError` errors
* contrib: Add support for a $MINIFLUX_IMAGE env var in docker-compose
* contrib: Bump docker-compose version to 3.4
Version 2.0.31 (June 6, 2021)
-----------------------------
* Expose comments_url entry field in Golang API client
* Use unique file names for cache busting instead of query string
* Highlight and sort feeds with unread entries in feeds list
* Mark items as read on click/middle click of external links
* Fix: Firefox on Windows does not show the active link as bold
* Avoid extra HTTP request for fetching custom stylesheet
* Remove invalid CSRF HTML meta tag
* Add lang attribute to root HTML tag
* Use runes instead of bytes to truncate JSON feed titles (avoid breaking Unicode strings)
* Expose changed_at time through the API
* Add new config option CLEANUP_ARCHIVE_BATCH_SIZE
* Add new option DATABASE_CONNECTION_LIFETIME
* Add database stats to Prometheus exporter
* Add Systemd watchdog
* Prevent the custom stylesheet from being cached by third-party CDNs
* Update a shared entry label translation in zh_CN
* Bump github.com/tdewolff/minify/v2 from 2.9.16 to 2.9.17
* Bump github.com/lib/pq from 1.10.1 to 1.10.2
Version 2.0.30 (May 7, 2021)
----------------------------
* Security fix: any user can delete any feed (Regression introduced in commit 51fb949)
* Fix password reset via CLI
* Increase default batch size value
* Handle RSS feed title with encoded Unicode entities
* Show number of unread per category in category list instead of number of feeds
* Bump github.com/lib/pq from 1.10.0 to 1.10.1
* Filtering doesn't work when selecting from multiple found feeds
* Bump github.com/tdewolff/minify/v2 from 2.9.15 to 2.9.16
* Use an appropriate color for visited links on dark theme
* Fix typo in reader/json/doc.go
* Create SECURITY.md
* Setup golangci-lint Github Action
* Add per feed cookies option
* Bump github.com/prometheus/client_golang from 1.9.0 to 1.10.0
* Bump github.com/tdewolff/minify/v2 from 2.9.13 to 2.9.15
Version 2.0.29 (Mar 21, 2021)
-----------------------------
* Miniflux requires at least Go 1.16 now
* Improved support of Atom text constructs
- Improve handling of CDATA in text elements
- Omit XHTML root element because it should not be part of the content
- Fix incorrect parsing of HTML elements
* Handle RDF feed with HTML encoded entry title
* Add Turkish language
* Improve deletion of feeds with lots of entries
* Add support for Systemd readiness notification using the sd_notify protocol (see the sketch at the end of this section)
* Remove feed_icons service worker cache because it's causing more problems than it solves (and HTTP cache seems faster)
* Add basic PWA offline page
- Add basic offline mode when using the service worker
- Starting in Chrome 93, offline mode is going to be a requirement to install the PWA
* Replace icon for "Add to home screen" button
* Use SVG icons for "toast" notifications
* Use SVG sprite for icons instead of inline elements
* Reset scroll position on mark page as read
* Add link to mark all feed entries as read
* Make web app display mode configurable (The change is visible after reinstalling the web app)
* Handle RSS feeds with CDATA in author item element
* Add read time on the article page
* Avoid showing a broken image when there is no feed icon
* Add option to allow self-signed or invalid certificates
* Add new config option to scrape YouTube's website to get video duration as read time (disabled by default)
* Send full article content to Wallabag
* Add more extensive health check support
- Improve endpoint to test database connection
- Add new cli argument: -healthcheck
- Update Docker Compose examples
* Update iframe "allow list" to support Bilibili videos
* Remove completely generated files and use embed package to bundle JS/CSS/Images/Translations files into the final binary
* Remove deprecated io/ioutil package
* Show Postgres version in "About" page
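
For the sd_notify item above: the readiness protocol consists of sending a datagram containing `READY=1` to the Unix socket named by the `NOTIFY_SOCKET` environment variable. The sketch below illustrates the protocol generically and is not Miniflux's implementation (abstract sockets, for instance, are not handled).

    // Sketch only: send "READY=1" to the unixgram socket provided by systemd
    // in $NOTIFY_SOCKET (Type=notify units).
    package main

    import (
        "log"
        "net"
        "os"
    )

    func notifyReady() error {
        socket := os.Getenv("NOTIFY_SOCKET")
        if socket == "" {
            return nil // not running under a Type=notify systemd unit
        }
        conn, err := net.DialUnix("unixgram", nil, &net.UnixAddr{Name: socket, Net: "unixgram"})
        if err != nil {
            return err
        }
        defer conn.Close()
        _, err = conn.Write([]byte("READY=1"))
        return err
    }

    func main() {
        if err := notifyReady(); err != nil {
            log.Println("sd_notify failed:", err)
        }
        // ... start serving ...
    }
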
Version 2.0.28 (Feb 15, 2021)
-----------------------------
* Add HTTP header "Referrer-Policy: no-referrer"
* Handle entry title with double encoded entities
* Add Open Containers annotations to Docker image
* Remove iframe inner HTML contents (iframe element never has fallback content)
* Update date parser to fix another time zone issue
* Update German translation for blocklist and keeplist
* Validate Keep list and Block list rules syntax
* Add support for IPv6 with zone index
* Allow images with data URLs
* Limit full-text search indexation to first 500K characters (tsvector has a size limit of 1MB)
* Change PWA display mode to standalone
* ETag value is not set correctly in HTTP client (regression)
* Add database backed Let's Encrypt certificate cache
* Add global option POLLING_PARSING_ERROR_LIMIT
* Update systemd service file comments to use `systemctl edit` for editing
* Update Go version to 1.15 in go.mod
* Don't discard the "Fetch via Proxy" option
* Update man page to show the default values
* Add PostgreSQL indices
* Add API endpoints to get feeds and entries of a category
* Create feed query builder
* Bump github.com/PuerkitoBio/goquery from 1.6.0 to 1.6.1
* Show global options in the about page
* Update man page to mention -1 can be used for CLEANUP_ARCHIVE_* options
Version 2.0.27 (Jan 9, 2021)
----------------------------
* Add spellcheck="false" to input fields
* Refactoring of entry, feed, category, and user validation
* Avoid stripping tags for entry title
* Add the possibility to subscribe to feeds with the Android Share menu
* API improvements:
- Change feed creation request to allow setting most fields via API
- Allow regular users to change settings via API
- Make user fields editable via API
- Renaming non-existent category via API should return a 404
* Update Systemd service file:
- Add capability CAP_NET_BIND_SERVICE (allow the process to listen on privileged ports)
- Enable a private /tmp for $CERT_CACHE (required when using Let's Encrypt)
* Update read/star icons to SVGs
* Add autocomplete="username" to HTML forms
* Improve user mass delete to use fewer Goroutines
* Use SQL transaction when creating user sessions and users
* Remove extra column (HSTORE field) from users table and migrate key/value pairs to specific columns
* Bump github.com/prometheus/client_golang from 1.8.0 to 1.9.0
* Bump github.com/lib/pq from 1.8.0 to 1.9.0
* Add styles for HTML tag
* Refactor SQL migrations:
- Avoid embedding SQL files into binary
- Allow more flexible changes by using Go functions
* Add Server-Timing header to unread page
* Show correct User Agent in input placeholders
* Add autocomplete attribute to login form
* Add Grafana dashboard in contrib folder
Version 2.0.26 (Dec 5, 2020)
----------------------------
* Use created_at instead of published_at for archiving entries
* Add created_at field for entries
* Handle invalid feeds with relative URLs
* Add API routes for "mark all as read"
* Add support for setting a global default User-Agent
* Add rewrite rule "replace" for custom search and replace
* Calculate reading time during feed processing
* Handle various invalid dates
* systemd: keep /run writeable
* debian package: add missing post-install script
* Do not follow redirects when trying known feed URLs
* Trim spaces around icon URLs
* Reinstate EXPOSE instruction in Dockerfile
* Update German and Portuguese translations
Version 2.0.25 (Nov 3, 2020)
----------------------------
* Rename "original" link to be more explicit
* Do not escape HTML for Atom 1.0 text content during parsing (Avoid HTML entities issues)
* Do not use charset.NewReader if the body is a valid UTF-8 document
* Restore the ability to use a proxy for all HTTP requests (see https://golang.org/pkg/net/http/#ProxyFromEnvironment)
* Show Git commit in about page
* Publish Docker images to GitHub Container Registry
* Added few Docker Compose examples in contrib folder
* Added Ansible Role + Playbook for Miniflux in contrib folder
* Add rewrite rule to use noscript content for images rendered with Javascript
* Bump github.com/prometheus/client_golang from 1.7.1 to 1.8.0
* Update contributor link and Godoc badge for API client
* Move Debian package builder to main repository
* Move RPM build files to main repository
* Add GitHub Action to generate Docker images
* Build multi-platform images with Docker Buildx
* Add keyboard shortcut to scroll current item to the top
* Add feed filters (Keeplist and Blocklist)
* Do not proxy image with a data URL
* Bump github.com/PuerkitoBio/goquery from 1.5.1 to 1.6.0
* Proxify articles crawled manually
* Proxify images defined in srcset attribute
* Remove plaintext Fever password from database
* Add keyboard shortcut to jump to an item's feed page
* Add option for swipe gesture on entries on mobile
Version 2.0.24 (Oct 3, 2020)
----------------------------
* Add rewrite rule to fix Medium.com images
* Update sanitizer to support responsive images:
- Add support for picture HTML tag
- Add support for srcset, media, and sizes attributes to img and source tags
* Enhance man page formatting
* Add Prometheus exporter
* Remove dependency on global config options in HTTP client
* API:
- Avoid database lookup if empty credentials are provided
- Add the possibility to filter entries by category ID
- Add the possibility to filter entries by a list of statuses
* Add Feed ID in worker error logs
* Tweak default HTTP client transport timeout values to reduce the number of file descriptors
* CSS tweaks and optimizations:
- Prevent sub and sup from affecting line-height
- Set touch-action on .items to prevent browser navigation
- Move font-family specific CSS to the appropriate file
- Update primary font-family for UI to be even more compatible with various operating systems
- Make .entry-content font-weight variable depending on font-family used
* Prevent the Javascript minifier from breaking keyboard shortcuts
* Rename service worker script to avoid being blocked by uBlock
* Update date parser to handle Pacific Daylight Time in addition to Pacific Standard Time
* Create index to speed up bookmark page
* Do not try to update a duplicated feed after a refresh
* Use a transaction to refresh and create entries
* Speed up entries clean up with an index and a goroutine
* Avoid the accumulation of enclosures by keeping only what is referenced in the feed
* Add workarounds for parsing an invalid date
* Archive older entries first
* Update API client to support more filters
* Avoid duplication between get feed entries and get entries API endpoints
* Enable strict slash to avoid a page not found (404) when using a trailing slash in the URLs
* Add a submit button to each section of the integration page
* Reload page after marking the page as read when showing unread entries
* Add option to archive unread entries
* Add option to enable maintenance mode
* Add HTTP proxy option for subscriptions
* Make add_invidious_video rule applicable for different invidious instances
* Fix reading time for jp, ko and zh languages
* Update POLLING_SCHEDULER description in man page
* Bump gorilla/mux from 1.7.4 to 1.8.0
* Add link to mark a feed as read
Version 2.0.23 (Aug 15, 2020)
-----------------------------
* Try known URLs when discovering subscriptions
* Add workarounds to find YouTube channel feeds (YouTube doesn't expose RSS links anymore for new-style URLs)
* Increase HTTP server timeout values
* Use stdlib constants for HTTP methods instead of strings
* Add support for RTL feed content
* Ignore to avoid overriding the default title if they are different
* Add support for secret keys exposed as a file (useful for containerized environments)
* Display recent entries first in search results
* Do not archive shared items
* Add option to change the number of entries per page
* Add Brazilian Portuguese (pt_BR) translation
* Add reading time for entries
* Redirect to login page if CSRF token is expired
* Fever API:
- Use getEntryIDs instead of getEntries to reduce memory consumption
- Fix max_id argument logic to follow the specs
- Improve logging
- Do not send articles to external services when unsaving an item
- Create index to speed up API calls
- Log client IP in middleware
* API client: Do not return body for response with no content
* REST API:
- Delete users asynchronously (Deleting large users might lock the tables)
- Add CORS support
* Align entry actions to the left
- Attempt to avoid awkward alignment on smartphone screens
- Keep the read/star actions aligned to the left
- Remove CSS flex to allow easier override with custom CSS
* Upgrade Postgres client library
* Upgrade CI checks to Go 1.15
Version 2.0.22 (Jun 19, 2020)
-----------------------------
* Remove child-src CSP policy (deprecated)
* Add /version endpoint
* Add the ability to use custom css
* Handle more invalid dates
* Add CSS styles for textarea
* Add index to speed up slow query
* Speed up feed list page rendering
* Add alternative scheduler based on the number of entries
* Setup Dependabot on GitHub
* Update Docker image to Alpine 3.12
* Add feed option to ignore HTTP cache
* Fix some Italian and German translations
* Added scraper rules for RayWenderlich.com, TheOatmeal.com, financialsamurai.com, dilbert.com, and turnoff.us
* Replace link to categories by a link to the list of entries in "Category > Feeds" page
* Change feed title to a link to the original website
* Add icons to feeds and categories list
* Update dependencies and remove vendor folder
Version 2.0.21 (Mar 28, 2020)
-----------------------------
* Add SVG icons to entry actions
* Add support for Invidious
- Embed Invidious player for invidio.us feeds
- Add new rewrite rule to use Invidious player for Youtube feeds
* Check during application startup if the database schema is up to date
* Change default theme for public pages to "System Serif"
* Add feature to share an article (create a public link of a feed entry)
* Fix SQL injection in full-text search rank ordering
* Add generic OpenID Connect provider (OAuth2)
* Use more secure TLS configuration for autocert server (increase SSL Labs score from B to A+)
* Add feature to create per-application API Keys
* Add Go 1.14 to GitHub Actions
* Add scraper rule for wdwnt.com
* Add API client function to refresh all feeds
* Add API endpoint to refresh all feeds
* Add Auth Proxy authentication
* Use rel=prev/next on pagination links
Version 2.0.20 (Feb 15, 2020)
-----------------------------
* Add Japanese translation
* History: show entries in the order in which they were read
* Add button to add to Home screen
* Ignore enclosures without URL
* Correct spelling of "toggle"
* List view: align information to the left side, and the actionable buttons to the right
* Redirect to /unread when getting a 404 for an unread expired entry
* Do not advance to the next item when using the 'v' shortcut on the list of starred items
* Wrap around when navigating with keyboard shortcuts on a list view
* Remove unused Feed.Entries and Entry.Category from API client
* Add comments link keyboard shortcut
* Allow application/xhtml+xml links as comments URL in Atom replies
* Allow only absolute URLs in comments URL
* Use internal XML workarounds to detect feed format
* Make menu consistent across feed pages
* Make sure external URLs are not encoded incorrectly by Go template engine
* Make sure whitelisted URI schemes are handled properly by the sanitizer
* Use white background for favicon (Improve legibility when using a dark theme)
* Remove dependency on Sed to build Docker images
* Normalize URL query string before executing HTTP requests
* Improve Dublin Core support for RDF feeds
* Improve Podcast support (iTunes and Google Play feeds)
* Add support for Atom 0.3
* Add support for Atom "replies" link relation
* Return outer HTML when scraping elements
* Update scraper rule for "Le Monde"
* Filter valid XML characters for UTF-8 XML documents before decoding
* Trim spaces for RDF entry links
Version 2.0.19 (Dec 1, 2019)
----------------------------
* Add shortcut "V" to open original link in current tab
* Add the possibility to add rules during feed creation
* Wrap attachments into disclosure element
* Show attachment size on entry page
* Add support of RSS Media elements (group, description, peer link, and thumbnails)
* Add rewrite functions: convert_text_link and nl2br
* Add scraper rule for openingsource.org
* Add Makefile target to build only amd64 Docker image
* Make sure to remove integration settings when removing a user
* Add API parameter to filter entries by category
* Display list of feeds per category
* Show the number of read and unread entries for each feed
* Make sure settings menu is consistent
* Remove fixed table-layout for entry content
* Update autocert lib because ACME v1 is EOL
* Do not lighten blockquote font color
* Update de_DE translation
* Send a response when changing status of removed entries in Fever API
* Add meta tag to disable Google Translate
* Improve storage module
* Improve XML decoder to remove illegal characters
* Compare Fever token case-insensitively
* Make sure integration tests are marked as failed in Github Actions
* Add new formats to date parser
* Add notification message when using keyboard shortcuts: f, s, and m.
* Prevent keyboard shortcuts from conflicting with Firefox’s "Find as you type" feature
Version 2.0.18 (Sep 25, 2019)
-----------------------------
* Add Docker image variant for arm32v7
* Add theme variants
- Use CSS variables instead of inheritance
- Rename default theme to "Light - Serif"
- Rename Black theme to "Dark - Serif"
- Rename "Sans-Serif" theme to "Light - Sans Serif"
- Add "System" theme that use system preferences: Dark or Light
- Add Serif and Sans-Serif variants for each color theme
* Avoid constraint error when having duplicate entries during feed creation
* Disable strict XML parsing
* Ignore invalid content type
* Update man page
* Replace Travis by GitHub Actions
* Rename cleanup config variables and deprecate old ones
- CLEANUP_FREQUENCY_HOURS instead of CLEANUP_FREQUENCY
- CLEANUP_ARCHIVE_READ_DAYS instead of ARCHIVE_READ_DAYS
* Make the number of days before removing old sessions configurable
* Add native lazy loading for images and iframes
* Do not buffer responses in the image proxy
* Update dependencies
* Add Go 1.13 to test matrix
* Replace link border by outline to avoid slight content shift
* New rewrite function: add_mailto_subject
* Import OPML from URL
* Fix HTML injection in addImageTitle
* Accept HTML entities when parsing XML
Version 2.0.17 (Aug 3, 2019)
----------------------------
* Update Docker image to Alpine Linux 3.10.1
* Pass auth header to manifest request (crossorigin attribute)
* Sort feed categories before serialization
* Fix syntax errors in man page
* Add .search margin-right
* Ask for confirmation before flushing history, marking page as read, and mark all as read
* Add option to disable feeds
Version 2.0.16 (Jun 8, 2019)
----------------------------
* Add option to toggle date/time in log messages
* Add optional config file parser in addition to environment variables
* Make HTTP Client timeout and max body size configurable
* Refactor config package:
- Parse configuration only once during startup time
- Store configuration values in a global variable
* Flip behavior of j and k keyboard shortcuts
* Bump Postgresql client library to v1.1.1 to bring in SCRAM-SHA-256 authentication
* Add option to enable/disable keyboard shortcuts
* Add missing translation
* Improve page reload when showing unread/all entries:
- Show only unread entries = refresh current page
- Show all entries = go to next page
* Always display feed entries even when there is a feed error
* Use loading label instead of saving when submitting login form
* Add OPML v1 support during importation
* Add 'allow-popups' to iframe sandbox permissions
Version 2.0.15 (Mar 16, 2019)
-----------------------------
* Move Dockerfile to main repo
* Change location of the binary from /usr/local/bin to /usr/bin in Docker image
* Add double tap detection for next/previous page navigation
* Allow users to disable auto-remove
* Make parser compatible with Go 1.12
* Add Golang 1.12 to CI
* Use feed ID instead of user ID to check entry URLs presence
* Fix typo in stylesheet
* Sort search results by relevance
* Prefer the published date for Atom feeds
* Add Spanish translation
* Rename session cookies
* Handle the case where the application session is expired but the user session is not
Version 2.0.14 (Jan 13, 2019)
-----------------------------
* Only attempt to change password if the confirmation field is filled in (Firefox)
* Remove URL from client user agent
* Make the feed list order case-insensitive
* Handle XHTML Summary elements for Atom feeds
* Make UTF-8 the default encoding for XML feeds
* Add more targets to Makefile
* Add -mod=vendor in Makefile
* Move health check endpoint from ui package to httpd service
* Add workaround for non GMT dates (RFC822, RFC850, and RFC1123)
* Make sure `` elements are bold
* Show scrollbars only when necessary for elements
* Add Italian translation
* Allow switching between unread only and all entries on category/feed views
* Add function storage.UpdateFeedError()
* Add BBC News scraping rule
* Ignore JSON feeds from EnsureUnicode()
* Preserve category selection when no feed is found
* Update XML encoding regex to take single quotes into consideration
* Send cli errors to stderr
* Update dependencies
* Make password prompt compatible with Windows
* Make the number of days before archiving read items configurable
* Change log level to debug when starting workers
* Do not show $DATABASE_URL warning when showing application info
* Move image proxy filter to template functions
* Update scraper rule for lemonde.fr
* Refactor manual entry scraper
* Apply rewriter rules on manual "Fetch Original Content"
* Add Makefile target for current OS and architecture
* Improve Makefile
Version 2.0.13 (Nov 25, 2018)
-----------------------------
* Add man page
* Add support for Systemd Socket Activation (experimental)
* Add the possibility to listen on Unix sockets
* Add config options to disable HTTP and scheduler services
* Archive more read entries in cleanup job
* Change default database connection string (backward compatible)
* Improve logging messages in ui package
* Improve overall Simplified Chinese translations
* Improve time since post date displays:
- "15 days" now is "15 days" rather than "3 weeks" ago
- "32 days" is now "1 month" rather than "2 months" ago
- "366 days" is now "1 year" rather than "2 years" ago
* Allow the scraper to parse XHTML documents
* Remove charset=utf-8 from JSON responses
* Ignore hotkeys containing Control, Alt or Meta keys
* Handle more encoding conversion edge cases
* Disable go test caching
* Avoid duplication of ldflags in Makefile
* Fix wrong translation key for category pages
* Code refactoring:
- Simplify application HTTP middlewares
- Replace daemon and scheduler package with service package
- Move UI middlewares and routes to ui package
- Move API middleware and routes to api package
- Move Fever middleware and routes to fever package
Version 2.0.12 (Oct 26, 2018)
-----------------------------
* Add OpenBSD build
* Improve logging for OAuth2 callback
* Make "g f" go to feed, or list of feeds
* Add more details in feed storage errors to facilitate debugging
* Add entries storage error to feed errors count
* Set arbitrary maximum size for tsvector column
* Unsubscribe from feed through link or "#"
* Simplify feed entries filtering
* Simplify feed fetcher
* Simplify feed parser and format detection
* Improve unit tests in url package
* Add short cli flags -i and -v
* Convert text links and line feeds to HTML in YouTube channels
* Change link state when marking all entries as read
* Add missing package descriptions for GoDoc
* Fix typo in license header
* Refactor HTTP response builder
* Improve Fever API performance when marking a feed or group as read
* Set focus on article link when pressing prev/next hotkeys
* Improve request package and add more unit tests
* Add more unit tests for config package
* Simplify locale package usage (refactoring)
* Translate application in Russian
* Use disclosure widget for advanced feed options
* Use unique translation IDs instead of English text as key
* Add more unit tests for template functions
* Fix invalid output when truncating Unicode text in templates
* Add the possibility to override default user agent for each feed
* Split Makefile linux targets by architecture
* Add compiler, Arch, and OS to info command
* Avoid line break between emoji and (un)read/(un)star links
* Build Docker image for multiple architectures (amd64, arm32v6, arm64v8)
Version 2.0.11 (Sep 11, 2018)
-----------------------------
* Set cookie flag `SameSite` to Lax mode
* Use predefined ciphers when TLS is configured
* Avoid displaying an error when shutting down the daemon
* Add "Mark this page as read" to the bottom
* Store client IP address in request context
* Refactor HTTP context handling
* Make user creation via environment variables idempotent
* Use regular text version of ✔︎ instead of emoji version on iOS
* Add toggle status button to entry page
* Migrate to Go Modules and Go 1.11
* Show count of feeds with permanent errors in header menu
* Display remote client IP in logs when having a login failure (Fail2Ban)
* Add remove button in feed edit page
* Split integration tests into multiple files
* Update scraper rule for heise.de
* Expose real error messages for internal server API errors
* Move Golang API client in project source tree (the separate project is deprecated)
* Use canonical imports
* Add Procfile
* Create database package (refactoring)
* Update user agent with new website URL
* Update German translation
Version 2.0.10 (July 22, 2018)
------------------------------
* Avoid browser caching issues when assets change
* Add Gzip/Deflate compression for HTML, JSON, CSS and Javascript responses
* Improve themes handling
* Store user theme in session
* Logged out users will keep their theme
* Add theme background color to web manifest and meta tag
* Update application icon with different sizes
* Add support for published tag in Atom feeds
* Add tooltip to feed domain in feeds list (title attribute)
* Prevent vertical scrolling on swipe
* Show feed title instead of domain in items list
* Add service worker to cache feed icons
* Make image proxy configurable via IMAGE_PROXY environment variable:
* none = No proxy
* http-only = Proxy only non-HTTPS images (default)
* all = Proxy everything
* Add alt attribute for feed icons
* Update CI jshint check
* Add embedly.com to iframe whitelist
* Use passive event listeners for touch events
* Add `add_dynamic_image` rewriter for JavaScript-loaded images
* Change feed password field type to text to avoid auto-completion with Firefox
* Using autocomplete="off" or autocomplete="new-password" doesn't change anything
* Changing the input ID doesn't change anything
* Using a different input name doesn't change anything
* Only Chrome supports autocomplete="new-password"
* Add base URL validation
* Update default stylesheet name in HTML layout
* Pre-generate themes stylesheets at build time
* Update vendor dependencies
* Refactor assets bundler and split Javascript files
* Run sanitizer after all processing and entry content rewrite
* Remove timestamp from generated files
* Add support for protocol relative YouTube URLs
* Add Postgres full-text search for entries
* Add search form in user interface
* Add search parameter to the API
* Improve Dutch locales
* Sandbox iframes when sanitizing
* Keep consistent text size on mobile orientation change
* Change permission of /etc/miniflux.conf to 600 instead of 644 in RPM package
* Add tzdata package to Docker image
* Update Docker image to Alpine Linux 3.8
Version 2.0.9 (July 1, 2018)
----------------------------
* Prevent Chrome from autocompleting non-login password fields
* Add cli flag to reset all feed errors
* Do not ignore errored feeds when a user refreshes feeds manually
* Add specific 404 and 401 error messages
* Strip binaries to reduce size
* Make sure we always get the pagination in unread mode
* Fix incorrect data value when toggling bookmark flag on entry page
* Set opener to null when opening original URL with JavaScript
* Remove unnecessary style
* Refactor AddImageTitle rewriter
* Only processes images with `src` **and** `title` attributes (others are ignored)
* Processes **all** images in the document (not just the first one)
* Wraps the image and its title attribute in a `figure` tag with the title attribute's contents in a `figcaption` tag
* Improve sanitizer to remove `style`, `noscript` and `script` tag contents
* Improve feed and user API updates with optional values
* Add new fields for feed username/password
* Improve memory usage debug log
* Disable keep-alive for HTTP client
* Close HTTP response body even for failed requests
* Add Sans-Serif theme
* Rewrite iframe Youtube URLs to https://www.youtube-nocookie.com
* Add more filters for API call `/entries` (see the sketch at the end of this section):
* before (unix timestamp)
* before_entry_id (int64)
* after (unix timestamp)
* after_entry_id (int64)
* starred (boolean)
* Rewrite individual entry pagination SQL queries
* Simplify entry query builder
* Prevent items from sticking on touchend
* Extended horizontal overflow to feed and category views
* Translate missing strings
* Update German translation
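
A minimal sketch of how an API client might combine the `/entries` filters listed above in a query string using plain `net/http`. The base URL and credentials are placeholders, and the exact parameter semantics (Unix timestamps for `before`/`after`, a boolean for `starred`) should be checked against the API documentation for your version.

    // Sketch only: query /v1/entries with the "after" and "starred" filters.
    package main

    import (
        "fmt"
        "io"
        "log"
        "net/http"
        "net/url"
        "time"
    )

    func main() {
        params := url.Values{}
        params.Set("after", fmt.Sprint(time.Now().Add(-24*time.Hour).Unix())) // assumed Unix timestamp
        params.Set("starred", "true")

        req, err := http.NewRequest("GET", "https://miniflux.example.org/v1/entries?"+params.Encode(), nil)
        if err != nil {
            log.Fatal(err)
        }
        req.SetBasicAuth("admin", "secret") // replace with real credentials

        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            log.Fatal(err)
        }
        defer resp.Body.Close()

        body, _ := io.ReadAll(resp.Body)
        fmt.Println(resp.Status, len(body), "bytes")
    }
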
Version 2.0.8 (June 4, 2018)
----------------------------
* Add Pocket integration
* Rewrite RealIP() to avoid returning an empty string
* Convert IP field from text to inet type
* Improve error handling in integration clients
* Make unread counter clickable
* Archive read entries automatically after 60 days
* Hide horizontal overflow when swiping articles on touch devices
* Add API endpoint to get logged user
* Fever API: Return response with an empty list if there is no item
* Handle feeds with dates formatted as Unix timestamp
Version 2.0.7 (May 7, 2018)
---------------------------
* Add API endpoint to import OPML file
* Make sure to close request body in HTTP client
* Do not show save link if no integration is configured
* Make sure integrations are configured before making any HTTP requests
* Prevent users from unlinking their OAuth2 account without having a local password
* Do not use shared variable to translate templates (avoid concurrency issue)
* Use vanilla HTTP handlers (refactoring)
* Move HTTP client to its own package (refactoring)
* Add middleware to read X-Forwarded-Proto header (refactoring)
* Use Gorilla middleware (refactoring)
* Scrape parent element for iframe
* Add SoundCloud and Bandcamp iframe sources
Version 2.0.6 (Apr 20, 2018)
----------------------------
* Improve graceful shutdown
* Simplify Heroku deployment
* Display memory usage and some metrics in logs
* Increase read/write timeout for HTTP server
* Add support for Dublin Core date in RDF feeds
* Do not return an error if the user session is not found
* Handle some non-English date formats
* Add missing French translation
* Rename RSS parser getters
* Get the right comments URL when having multiple namespaces
* Ignore caching headers for feeds that send "Expires: 0"
* Update translations
Version 2.0.5 (Apr 7, 2018)
---------------------------
* Prevent the unread counter from being off by one when reading an entry
* Add Comments URL to entries
* Add FreeBSD build target
* Handle RSS author elements with inner HTML
* Fix typo in translations
* Add Dutch translation
* Convert enclosure size field to bigint
* Switch CI to Go v1.10
* Fix broken OPML import when compiling with Go 1.10
Version 2.0.4 (Mar 5, 2018)
---------------------------
* Add Simplified Chinese translation
* Add Nunux Keeper integration
* Filter the list of timezones
* Add timezone to entries dates for REST and Fever API
* Show last login and session creation date in current timezone
* Fix typo in edit user template
* Improve parser error messages
* Remove parentheses around feed error messages
* Support localized feed errors generated by background workers
* Print info message if DATABASE_URL is not set
Version 2.0.3 (Feb 19, 2018)
----------------------------
* Add Polish translation
* Change color of tags for black theme
* Add database indexes (don't forget to run database migrations)
* Handle Atom feeds with HTML title
* Strip invalid XML characters to avoid parsing errors
* Improve error handling for HTTP client
Version 2.0.2 (Feb 5, 2018)
---------------------------
* Add support for Let's Encrypt http-01 challenge
* Move template functions outside engine (refactoring)
* Take timezone into consideration when calculating relative time
* Add support for HTTP Strict Transport Security header
* Add support for base URLs with subfolders
* Add missing about menu in settings
* Show API URL endpoints in user interface
* Do not update entry date while refreshing a feed
* Add flag to toggle debug logging
* Improve unread counter updates
Version 2.0.1 (Jan 22, 2018)
----------------------------
* Change user agent (People are blocking the crawler with mod_security)
* Move environment variables to config package (refactoring)
* Add build targets for all ARM architectures
* Do not crawl existing entry URLs
* Show translated login page in user language when logged out
* Handle more encoding edge cases:
- Feeds with charset specified only in Content-Type header and not in XML document
- Feeds with charset specified in both places
- Feeds with charset specified only in XML document and not in HTTP header
* Add German translation
* Add mark as read/unread link on list items
* Add API endpoint for OPML export
Version 2.0.0 (Jan 11, 2018)
----------------------------
* Initial release of Miniflux 2.
v2-2.2.6/LICENSE 0000664 0000000 0000000 00000023676 14756465373 0013117 0 ustar 00root root 0000000 0000000
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
v2-2.2.6/Makefile 0000664 0000000 0000000 00000013216 14756465373 0013537 0 ustar 00root root 0000000 0000000 APP := miniflux
DOCKER_IMAGE := miniflux/miniflux
VERSION := $(shell git describe --tags --abbrev=0 2>/dev/null)
COMMIT := $(shell git rev-parse --short HEAD 2>/dev/null)
BUILD_DATE := `date +%FT%T%z`
LD_FLAGS := "-s -w -X 'miniflux.app/v2/internal/version.Version=$(VERSION)' -X 'miniflux.app/v2/internal/version.Commit=$(COMMIT)' -X 'miniflux.app/v2/internal/version.BuildDate=$(BUILD_DATE)'"
PKG_LIST := $(shell go list ./... | grep -v /vendor/)
DB_URL := postgres://postgres:postgres@localhost/miniflux_test?sslmode=disable
DOCKER_PLATFORM := amd64
export PGPASSWORD := postgres
.PHONY: \
miniflux \
miniflux-no-pie \
linux-amd64 \
linux-arm64 \
linux-armv7 \
linux-armv6 \
linux-armv5 \
linux-x86 \
darwin-amd64 \
darwin-arm64 \
freebsd-amd64 \
freebsd-x86 \
openbsd-amd64 \
openbsd-x86 \
netbsd-x86 \
netbsd-amd64 \
windows-amd64 \
windows-x86 \
build \
run \
clean \
test \
lint \
integration-test \
clean-integration-test \
docker-image \
docker-image-distroless \
docker-images \
rpm \
debian \
debian-packages
miniflux:
@ go build -buildmode=pie -ldflags=$(LD_FLAGS) -o $(APP) main.go
miniflux-no-pie:
@ go build -ldflags=$(LD_FLAGS) -o $(APP) main.go
linux-amd64:
@ CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
@ sha256sum $(APP)-$@ > $(APP)-$@.sha256
linux-arm64:
@ CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
@ sha256sum $(APP)-$@ > $(APP)-$@.sha256
linux-armv7:
@ CGO_ENABLED=0 GOOS=linux GOARCH=arm GOARM=7 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
@ sha256sum $(APP)-$@ > $(APP)-$@.sha256
linux-armv6:
@ CGO_ENABLED=0 GOOS=linux GOARCH=arm GOARM=6 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
@ sha256sum $(APP)-$@ > $(APP)-$@.sha256
linux-armv5:
@ CGO_ENABLED=0 GOOS=linux GOARCH=arm GOARM=5 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
@ sha256sum $(APP)-$@ > $(APP)-$@.sha256
darwin-amd64:
@ GOOS=darwin GOARCH=amd64 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
@ sha256sum $(APP)-$@ > $(APP)-$@.sha256
darwin-arm64:
@ GOOS=darwin GOARCH=arm64 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
@ sha256sum $(APP)-$@ > $(APP)-$@.sha256
freebsd-amd64:
@ CGO_ENABLED=0 GOOS=freebsd GOARCH=amd64 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
@ sha256sum $(APP)-$@ > $(APP)-$@.sha256
openbsd-amd64:
@ GOOS=openbsd GOARCH=amd64 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
@ sha256sum $(APP)-$@ > $(APP)-$@.sha256
windows-amd64:
@ GOOS=windows GOARCH=amd64 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@.exe main.go
@ sha256sum $(APP)-$@.exe > $(APP)-$@.exe.sha256
build: linux-amd64 linux-arm64 linux-armv7 linux-armv6 linux-armv5 darwin-amd64 darwin-arm64 freebsd-amd64 openbsd-amd64 windows-amd64
# NOTE: unsupported targets
netbsd-amd64:
@ CGO_ENABLED=0 GOOS=netbsd GOARCH=amd64 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
linux-x86:
@ CGO_ENABLED=0 GOOS=linux GOARCH=386 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
freebsd-x86:
@ CGO_ENABLED=0 GOOS=freebsd GOARCH=386 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
netbsd-x86:
@ CGO_ENABLED=0 GOOS=netbsd GOARCH=386 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
openbsd-x86:
@ GOOS=openbsd GOARCH=386 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@ main.go
windows-x86:
@ GOOS=windows GOARCH=386 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@.exe main.go
run:
@ LOG_DATE_TIME=1 LOG_LEVEL=debug RUN_MIGRATIONS=1 CREATE_ADMIN=1 ADMIN_USERNAME=admin ADMIN_PASSWORD=test123 go run main.go
clean:
@ rm -f $(APP)-* $(APP) $(APP)*.rpm $(APP)*.deb $(APP)*.exe $(APP)*.sha256
test:
go test -cover -race -count=1 ./...
lint:
go vet ./...
staticcheck ./...
golangci-lint run --disable errcheck --enable sqlclosecheck --enable misspell --enable gofmt --enable goimports --enable whitespace
integration-test:
psql -U postgres -c 'drop database if exists miniflux_test;'
psql -U postgres -c 'create database miniflux_test;'
go build -o miniflux-test main.go
DATABASE_URL=$(DB_URL) \
ADMIN_USERNAME=admin \
ADMIN_PASSWORD=test123 \
CREATE_ADMIN=1 \
RUN_MIGRATIONS=1 \
DEBUG=1 \
./miniflux-test >/tmp/miniflux.log 2>&1 & echo "$$!" > "/tmp/miniflux.pid"
while ! nc -z localhost 8080; do sleep 1; done
TEST_MINIFLUX_BASE_URL=http://127.0.0.1:8080 \
TEST_MINIFLUX_ADMIN_USERNAME=admin \
TEST_MINIFLUX_ADMIN_PASSWORD=test123 \
go test -v -count=1 ./internal/api
clean-integration-test:
@ kill -9 `cat /tmp/miniflux.pid`
@ rm -f /tmp/miniflux.pid /tmp/miniflux.log
@ rm miniflux-test
@ psql -U postgres -c 'drop database if exists miniflux_test;'
docker-image:
docker build --pull -t $(DOCKER_IMAGE):$(VERSION) -f packaging/docker/alpine/Dockerfile .
docker-image-distroless:
docker build -t $(DOCKER_IMAGE):$(VERSION) -f packaging/docker/distroless/Dockerfile .
docker-images:
docker buildx build \
--platform linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6 \
--file packaging/docker/alpine/Dockerfile \
--tag $(DOCKER_IMAGE):$(VERSION) \
--push .
rpm: clean
@ docker build \
-t miniflux-rpm-builder \
-f packaging/rpm/Dockerfile \
.
@ docker run --rm \
-v ${PWD}:/root/rpmbuild/RPMS/x86_64 miniflux-rpm-builder \
rpmbuild -bb --define "_miniflux_version $(VERSION)" /root/rpmbuild/SPECS/miniflux.spec
debian:
@ docker buildx build --load \
--platform linux/$(DOCKER_PLATFORM) \
-t miniflux-deb-builder \
-f packaging/debian/Dockerfile \
.
@ docker run --rm --platform linux/$(DOCKER_PLATFORM) \
-v ${PWD}:/pkg miniflux-deb-builder
debian-packages: clean
$(MAKE) debian DOCKER_PLATFORM=amd64
$(MAKE) debian DOCKER_PLATFORM=arm64
$(MAKE) debian DOCKER_PLATFORM=arm/v7
v2-2.2.6/Procfile 0000664 0000000 0000000 00000000022 14756465373 0013554 0 ustar 00root root 0000000 0000000 web: miniflux.app
v2-2.2.6/README.md 0000664 0000000 0000000 00000003640 14756465373 0013356 0 ustar 00root root 0000000 0000000 Miniflux 2
==========
Miniflux is a minimalist and opinionated feed reader:
- Written in Go (Golang)
- Works only with PostgreSQL
- Doesn't use any ORM
- Doesn't use any complicated framework
- Uses only modern vanilla JavaScript (ES6 and Fetch API)
- Single binary compiled statically, without dependencies
- The number of features is voluntarily limited
It's simple, fast, lightweight and super easy to install.
Official website: https://miniflux.app
Documentation
-------------
The Miniflux documentation is available at https://miniflux.app/docs/ ([Man page](https://miniflux.app/miniflux.1.html))
- [Opinionated?](https://miniflux.app/opinionated.html)
- [Features](https://miniflux.app/features.html)
- [Requirements](https://miniflux.app/docs/requirements.html)
- [Installation Instructions](https://miniflux.app/docs/installation.html)
- [Upgrading to a New Version](https://miniflux.app/docs/upgrade.html)
- [Configuration](https://miniflux.app/docs/configuration.html)
- [Command Line Usage](https://miniflux.app/docs/cli.html)
- [User Interface Usage](https://miniflux.app/docs/ui.html)
- [Keyboard Shortcuts](https://miniflux.app/docs/keyboard_shortcuts.html)
- [Integration with External Services](https://miniflux.app/docs/services.html)
- [Rewrite and Scraper Rules](https://miniflux.app/docs/rules.html)
- [API Reference](https://miniflux.app/docs/api.html)
- [Development](https://miniflux.app/docs/development.html)
- [Internationalization](https://miniflux.app/docs/i18n.html)
- [Frequently Asked Questions](https://miniflux.app/faq.html)
Screenshots
-----------
Default theme:

Dark theme when using keyboard navigation:

Credits
-------
- Authors: Frédéric Guillot - [List of contributors](https://github.com/miniflux/v2/graphs/contributors)
- Distributed under Apache 2.0 License
v2-2.2.6/SECURITY.md 0000664 0000000 0000000 00000001014 14756465373 0013661 0 ustar 00root root 0000000 0000000 # Security Policy
## Supported Versions
Only the latest stable version is supported.
## Reporting a Vulnerability
Preferably, [report the vulnerability privately using GitHub](https://github.com/miniflux/v2/security/advisories/new) ([documentation](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability)).
If you do not want to use GitHub, send an email to `security AT miniflux DOT net` with all the steps to reproduce the problem.
v2-2.2.6/client/ 0000775 0000000 0000000 00000000000 14756465373 0013352 5 ustar 00root root 0000000 0000000 v2-2.2.6/client/README.md 0000664 0000000 0000000 00000001765 14756465373 0014642 0 ustar 00root root 0000000 0000000 Miniflux API Client
===================
[PkgGoDev](https://pkg.go.dev/miniflux.app/v2/client)
Client library for the Miniflux REST API.
Installation
------------
```bash
go get -u miniflux.app/v2/client
```
Example
-------
```go
package main
import (
"fmt"
"os"
miniflux "miniflux.app/v2/client"
)
func main() {
// Authentication with username/password:
client := miniflux.NewClient("https://api.example.org", "admin", "secret")
// Alternatively, authenticate with an API key:
// client := miniflux.NewClient("https://api.example.org", "my-secret-token")
// Fetch all feeds.
feeds, err := client.Feeds()
if err != nil {
fmt.Println(err)
return
}
fmt.Println(feeds)
// Backup your feeds to an OPML file.
opml, err := client.Export()
if err != nil {
fmt.Println(err)
return
}
err = os.WriteFile("opml.xml", opml, 0644)
if err != nil {
fmt.Println(err)
return
}
}
```
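As a further illustration, here is a minimal sketch that fetches the ten most recent unread entries and marks them as read. The endpoint URL and API key are placeholders, and the `published_at`/`desc` sort parameters are only one possible choice.

```go
package main

import (
	"fmt"

	miniflux "miniflux.app/v2/client"
)

func main() {
	// Placeholder endpoint and API key.
	client := miniflux.NewClient("https://api.example.org", "my-secret-token")

	// Fetch up to 10 unread entries, newest first.
	result, err := client.Entries(&miniflux.Filter{
		Status:    miniflux.EntryStatusUnread,
		Order:     "published_at",
		Direction: "desc",
		Limit:     10,
	})
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("Total unread entries: %d\n", result.Total)

	// Collect the entry IDs and mark them as read.
	var ids []int64
	for _, entry := range result.Entries {
		ids = append(ids, entry.ID)
	}
	if len(ids) > 0 {
		if err := client.UpdateEntries(ids, miniflux.EntryStatusRead); err != nil {
			fmt.Println(err)
		}
	}
}
```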
v2-2.2.6/client/client.go 0000664 0000000 0000000 00000045220 14756465373 0015162 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package client // import "miniflux.app/v2/client"
import (
"encoding/json"
"fmt"
"io"
"net/url"
"strconv"
"strings"
)
// Client holds API procedure calls.
type Client struct {
request *request
}
// New returns a new Miniflux client.
// Deprecated: use NewClient instead.
func New(endpoint string, credentials ...string) *Client {
return NewClient(endpoint, credentials...)
}
// NewClient returns a new Miniflux client.
func NewClient(endpoint string, credentials ...string) *Client {
// Trim trailing slashes and /v1 from the endpoint.
endpoint = strings.TrimSuffix(endpoint, "/")
endpoint = strings.TrimSuffix(endpoint, "/v1")
switch len(credentials) {
case 2:
return &Client{request: &request{endpoint: endpoint, username: credentials[0], password: credentials[1]}}
case 1:
return &Client{request: &request{endpoint: endpoint, apiKey: credentials[0]}}
default:
return &Client{request: &request{endpoint: endpoint}}
}
}
// Healthcheck checks if the application is up and running.
func (c *Client) Healthcheck() error {
body, err := c.request.Get("/healthcheck")
if err != nil {
return fmt.Errorf("miniflux: unable to perform healthcheck: %w", err)
}
defer body.Close()
responseBodyContent, err := io.ReadAll(body)
if err != nil {
return fmt.Errorf("miniflux: unable to read healthcheck response: %w", err)
}
if string(responseBodyContent) != "OK" {
return fmt.Errorf("miniflux: invalid healthcheck response: %q", responseBodyContent)
}
return nil
}
// Version returns the version of the Miniflux instance.
func (c *Client) Version() (*VersionResponse, error) {
body, err := c.request.Get("/v1/version")
if err != nil {
return nil, err
}
defer body.Close()
var versionResponse *VersionResponse
if err := json.NewDecoder(body).Decode(&versionResponse); err != nil {
return nil, fmt.Errorf("miniflux: json error (%v)", err)
}
return versionResponse, nil
}
// Me returns the logged-in user's information.
func (c *Client) Me() (*User, error) {
body, err := c.request.Get("/v1/me")
if err != nil {
return nil, err
}
defer body.Close()
var user *User
if err := json.NewDecoder(body).Decode(&user); err != nil {
return nil, fmt.Errorf("miniflux: json error (%v)", err)
}
return user, nil
}
// Users returns all users.
func (c *Client) Users() (Users, error) {
body, err := c.request.Get("/v1/users")
if err != nil {
return nil, err
}
defer body.Close()
var users Users
if err := json.NewDecoder(body).Decode(&users); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return users, nil
}
// UserByID returns a single user.
func (c *Client) UserByID(userID int64) (*User, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/users/%d", userID))
if err != nil {
return nil, err
}
defer body.Close()
var user User
if err := json.NewDecoder(body).Decode(&user); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return &user, nil
}
// UserByUsername returns a single user.
func (c *Client) UserByUsername(username string) (*User, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/users/%s", username))
if err != nil {
return nil, err
}
defer body.Close()
var user User
if err := json.NewDecoder(body).Decode(&user); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return &user, nil
}
// CreateUser creates a new user in the system.
func (c *Client) CreateUser(username, password string, isAdmin bool) (*User, error) {
body, err := c.request.Post("/v1/users", &UserCreationRequest{
Username: username,
Password: password,
IsAdmin: isAdmin,
})
if err != nil {
return nil, err
}
defer body.Close()
var user *User
if err := json.NewDecoder(body).Decode(&user); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return user, nil
}
// UpdateUser updates a user in the system.
func (c *Client) UpdateUser(userID int64, userChanges *UserModificationRequest) (*User, error) {
body, err := c.request.Put(fmt.Sprintf("/v1/users/%d", userID), userChanges)
if err != nil {
return nil, err
}
defer body.Close()
var u *User
if err := json.NewDecoder(body).Decode(&u); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return u, nil
}
// DeleteUser removes a user from the system.
func (c *Client) DeleteUser(userID int64) error {
return c.request.Delete(fmt.Sprintf("/v1/users/%d", userID))
}
// MarkAllAsRead marks all unread entries as read for a given user.
func (c *Client) MarkAllAsRead(userID int64) error {
_, err := c.request.Put(fmt.Sprintf("/v1/users/%d/mark-all-as-read", userID), nil)
return err
}
// IntegrationsStatus fetches the integrations status for the logged-in user.
func (c *Client) IntegrationsStatus() (bool, error) {
body, err := c.request.Get("/v1/integrations/status")
if err != nil {
return false, err
}
defer body.Close()
var response struct {
HasIntegrations bool `json:"has_integrations"`
}
if err := json.NewDecoder(body).Decode(&response); err != nil {
return false, fmt.Errorf("miniflux: response error (%v)", err)
}
return response.HasIntegrations, nil
}
// Discover tries to find subscriptions from a website.
func (c *Client) Discover(url string) (Subscriptions, error) {
body, err := c.request.Post("/v1/discover", map[string]string{"url": url})
if err != nil {
return nil, err
}
defer body.Close()
var subscriptions Subscriptions
if err := json.NewDecoder(body).Decode(&subscriptions); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return subscriptions, nil
}
// Categories gets the list of categories.
func (c *Client) Categories() (Categories, error) {
body, err := c.request.Get("/v1/categories")
if err != nil {
return nil, err
}
defer body.Close()
var categories Categories
if err := json.NewDecoder(body).Decode(&categories); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return categories, nil
}
// CreateCategory creates a new category.
func (c *Client) CreateCategory(title string) (*Category, error) {
body, err := c.request.Post("/v1/categories", map[string]interface{}{
"title": title,
})
if err != nil {
return nil, err
}
defer body.Close()
var category *Category
if err := json.NewDecoder(body).Decode(&category); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return category, nil
}
// UpdateCategory updates a category.
func (c *Client) UpdateCategory(categoryID int64, title string) (*Category, error) {
body, err := c.request.Put(fmt.Sprintf("/v1/categories/%d", categoryID), map[string]interface{}{
"title": title,
})
if err != nil {
return nil, err
}
defer body.Close()
var category *Category
if err := json.NewDecoder(body).Decode(&category); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return category, nil
}
// MarkCategoryAsRead marks all unread entries in a category as read.
func (c *Client) MarkCategoryAsRead(categoryID int64) error {
_, err := c.request.Put(fmt.Sprintf("/v1/categories/%d/mark-all-as-read", categoryID), nil)
return err
}
// CategoryFeeds gets feeds of a category.
func (c *Client) CategoryFeeds(categoryID int64) (Feeds, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/categories/%d/feeds", categoryID))
if err != nil {
return nil, err
}
defer body.Close()
var feeds Feeds
if err := json.NewDecoder(body).Decode(&feeds); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return feeds, nil
}
// DeleteCategory removes a category.
func (c *Client) DeleteCategory(categoryID int64) error {
return c.request.Delete(fmt.Sprintf("/v1/categories/%d", categoryID))
}
// RefreshCategory refreshes a category.
func (c *Client) RefreshCategory(categoryID int64) error {
_, err := c.request.Put(fmt.Sprintf("/v1/categories/%d/refresh", categoryID), nil)
return err
}
// Feeds gets all feeds.
func (c *Client) Feeds() (Feeds, error) {
body, err := c.request.Get("/v1/feeds")
if err != nil {
return nil, err
}
defer body.Close()
var feeds Feeds
if err := json.NewDecoder(body).Decode(&feeds); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return feeds, nil
}
// Export exports feeds to an OPML file.
func (c *Client) Export() ([]byte, error) {
body, err := c.request.Get("/v1/export")
if err != nil {
return nil, err
}
defer body.Close()
opml, err := io.ReadAll(body)
if err != nil {
return nil, err
}
return opml, nil
}
// Import imports an OPML file.
func (c *Client) Import(f io.ReadCloser) error {
_, err := c.request.PostFile("/v1/import", f)
return err
}
// Feed gets a feed.
func (c *Client) Feed(feedID int64) (*Feed, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/feeds/%d", feedID))
if err != nil {
return nil, err
}
defer body.Close()
var feed *Feed
if err := json.NewDecoder(body).Decode(&feed); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return feed, nil
}
// CreateFeed creates a new feed.
func (c *Client) CreateFeed(feedCreationRequest *FeedCreationRequest) (int64, error) {
body, err := c.request.Post("/v1/feeds", feedCreationRequest)
if err != nil {
return 0, err
}
defer body.Close()
type result struct {
FeedID int64 `json:"feed_id"`
}
var r result
if err := json.NewDecoder(body).Decode(&r); err != nil {
return 0, fmt.Errorf("miniflux: response error (%v)", err)
}
return r.FeedID, nil
}
// UpdateFeed updates a feed.
func (c *Client) UpdateFeed(feedID int64, feedChanges *FeedModificationRequest) (*Feed, error) {
body, err := c.request.Put(fmt.Sprintf("/v1/feeds/%d", feedID), feedChanges)
if err != nil {
return nil, err
}
defer body.Close()
var f *Feed
if err := json.NewDecoder(body).Decode(&f); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return f, nil
}
// MarkFeedAsRead marks all unread entries of the feed as read.
func (c *Client) MarkFeedAsRead(feedID int64) error {
_, err := c.request.Put(fmt.Sprintf("/v1/feeds/%d/mark-all-as-read", feedID), nil)
return err
}
// RefreshAllFeeds refreshes all feeds.
func (c *Client) RefreshAllFeeds() error {
_, err := c.request.Put("/v1/feeds/refresh", nil)
return err
}
// RefreshFeed refreshes a feed.
func (c *Client) RefreshFeed(feedID int64) error {
_, err := c.request.Put(fmt.Sprintf("/v1/feeds/%d/refresh", feedID), nil)
return err
}
// DeleteFeed removes a feed.
func (c *Client) DeleteFeed(feedID int64) error {
return c.request.Delete(fmt.Sprintf("/v1/feeds/%d", feedID))
}
// FeedIcon gets a feed icon.
func (c *Client) FeedIcon(feedID int64) (*FeedIcon, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/feeds/%d/icon", feedID))
if err != nil {
return nil, err
}
defer body.Close()
var feedIcon *FeedIcon
if err := json.NewDecoder(body).Decode(&feedIcon); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return feedIcon, nil
}
// FeedEntry gets a single feed entry.
func (c *Client) FeedEntry(feedID, entryID int64) (*Entry, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/feeds/%d/entries/%d", feedID, entryID))
if err != nil {
return nil, err
}
defer body.Close()
var entry *Entry
if err := json.NewDecoder(body).Decode(&entry); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return entry, nil
}
// CategoryEntry gets a single category entry.
func (c *Client) CategoryEntry(categoryID, entryID int64) (*Entry, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/categories/%d/entries/%d", categoryID, entryID))
if err != nil {
return nil, err
}
defer body.Close()
var entry *Entry
if err := json.NewDecoder(body).Decode(&entry); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return entry, nil
}
// Entry gets a single entry.
func (c *Client) Entry(entryID int64) (*Entry, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/entries/%d", entryID))
if err != nil {
return nil, err
}
defer body.Close()
var entry *Entry
if err := json.NewDecoder(body).Decode(&entry); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return entry, nil
}
// Entries fetches entries.
func (c *Client) Entries(filter *Filter) (*EntryResultSet, error) {
path := buildFilterQueryString("/v1/entries", filter)
body, err := c.request.Get(path)
if err != nil {
return nil, err
}
defer body.Close()
var result EntryResultSet
if err := json.NewDecoder(body).Decode(&result); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return &result, nil
}
// FeedEntries fetches feed entries.
func (c *Client) FeedEntries(feedID int64, filter *Filter) (*EntryResultSet, error) {
path := buildFilterQueryString(fmt.Sprintf("/v1/feeds/%d/entries", feedID), filter)
body, err := c.request.Get(path)
if err != nil {
return nil, err
}
defer body.Close()
var result EntryResultSet
if err := json.NewDecoder(body).Decode(&result); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return &result, nil
}
// CategoryEntries fetches entries of a category.
func (c *Client) CategoryEntries(categoryID int64, filter *Filter) (*EntryResultSet, error) {
path := buildFilterQueryString(fmt.Sprintf("/v1/categories/%d/entries", categoryID), filter)
body, err := c.request.Get(path)
if err != nil {
return nil, err
}
defer body.Close()
var result EntryResultSet
if err := json.NewDecoder(body).Decode(&result); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return &result, nil
}
// UpdateEntries updates the status of a list of entries.
func (c *Client) UpdateEntries(entryIDs []int64, status string) error {
type payload struct {
EntryIDs []int64 `json:"entry_ids"`
Status string `json:"status"`
}
_, err := c.request.Put("/v1/entries", &payload{EntryIDs: entryIDs, Status: status})
return err
}
// UpdateEntry updates an entry.
func (c *Client) UpdateEntry(entryID int64, entryChanges *EntryModificationRequest) (*Entry, error) {
body, err := c.request.Put(fmt.Sprintf("/v1/entries/%d", entryID), entryChanges)
if err != nil {
return nil, err
}
defer body.Close()
var entry *Entry
if err := json.NewDecoder(body).Decode(&entry); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return entry, nil
}
// ToggleBookmark toggles entry bookmark value.
func (c *Client) ToggleBookmark(entryID int64) error {
_, err := c.request.Put(fmt.Sprintf("/v1/entries/%d/bookmark", entryID), nil)
return err
}
// SaveEntry sends an entry to a third-party service.
func (c *Client) SaveEntry(entryID int64) error {
_, err := c.request.Post(fmt.Sprintf("/v1/entries/%d/save", entryID), nil)
return err
}
// FetchEntryOriginalContent fetches the original content of an entry using the scraper.
func (c *Client) FetchEntryOriginalContent(entryID int64) (string, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/entries/%d/fetch-content", entryID))
if err != nil {
return "", err
}
defer body.Close()
var response struct {
Content string `json:"content"`
}
if err := json.NewDecoder(body).Decode(&response); err != nil {
return "", fmt.Errorf("miniflux: response error (%v)", err)
}
return response.Content, nil
}
// FetchCounters fetches feed counters.
func (c *Client) FetchCounters() (*FeedCounters, error) {
body, err := c.request.Get("/v1/feeds/counters")
if err != nil {
return nil, err
}
defer body.Close()
var result FeedCounters
if err := json.NewDecoder(body).Decode(&result); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return &result, nil
}
// FlushHistory changes all entries with the status "read" to "removed".
func (c *Client) FlushHistory() error {
_, err := c.request.Put("/v1/flush-history", nil)
return err
}
// Icon fetches a feed icon.
func (c *Client) Icon(iconID int64) (*FeedIcon, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/icons/%d", iconID))
if err != nil {
return nil, err
}
defer body.Close()
var feedIcon *FeedIcon
if err := json.NewDecoder(body).Decode(&feedIcon); err != nil {
return nil, fmt.Errorf("miniflux: response error (%v)", err)
}
return feedIcon, nil
}
// Enclosure fetches a specific enclosure.
func (c *Client) Enclosure(enclosureID int64) (*Enclosure, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/enclosures/%d", enclosureID))
if err != nil {
return nil, err
}
defer body.Close()
var enclosure *Enclosure
if err := json.NewDecoder(body).Decode(&enclosure); err != nil {
return nil, fmt.Errorf("miniflux: response error(%v)", err)
}
return enclosure, nil
}
// UpdateEnclosure updates an enclosure.
func (c *Client) UpdateEnclosure(enclosureID int64, enclosureUpdate *EnclosureUpdateRequest) error {
_, err := c.request.Put(fmt.Sprintf("/v1/enclosures/%d", enclosureID), enclosureUpdate)
return err
}
func buildFilterQueryString(path string, filter *Filter) string {
if filter != nil {
values := url.Values{}
if filter.Status != "" {
values.Set("status", filter.Status)
}
if filter.Direction != "" {
values.Set("direction", filter.Direction)
}
if filter.Order != "" {
values.Set("order", filter.Order)
}
if filter.Limit >= 0 {
values.Set("limit", strconv.Itoa(filter.Limit))
}
if filter.Offset >= 0 {
values.Set("offset", strconv.Itoa(filter.Offset))
}
if filter.After > 0 {
values.Set("after", strconv.FormatInt(filter.After, 10))
}
if filter.Before > 0 {
values.Set("before", strconv.FormatInt(filter.Before, 10))
}
if filter.PublishedAfter > 0 {
values.Set("published_after", strconv.FormatInt(filter.PublishedAfter, 10))
}
if filter.PublishedBefore > 0 {
values.Set("published_before", strconv.FormatInt(filter.PublishedBefore, 10))
}
if filter.ChangedAfter > 0 {
values.Set("changed_after", strconv.FormatInt(filter.ChangedAfter, 10))
}
if filter.ChangedBefore > 0 {
values.Set("changed_before", strconv.FormatInt(filter.ChangedBefore, 10))
}
if filter.AfterEntryID > 0 {
values.Set("after_entry_id", strconv.FormatInt(filter.AfterEntryID, 10))
}
if filter.BeforeEntryID > 0 {
values.Set("before_entry_id", strconv.FormatInt(filter.BeforeEntryID, 10))
}
if filter.Starred != "" {
values.Set("starred", filter.Starred)
}
if filter.Search != "" {
values.Set("search", filter.Search)
}
if filter.CategoryID > 0 {
values.Set("category_id", strconv.FormatInt(filter.CategoryID, 10))
}
if filter.FeedID > 0 {
values.Set("feed_id", strconv.FormatInt(filter.FeedID, 10))
}
if filter.GloballyVisible {
values.Set("globally_visible", "true")
}
for _, status := range filter.Statuses {
values.Add("status", status)
}
path = fmt.Sprintf("%s?%s", path, values.Encode())
}
return path
}
v2-2.2.6/client/doc.go 0000664 0000000 0000000 00000001317 14756465373 0014450 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
/*
Package client implements a client library for the Miniflux REST API.
# Examples
This code snippet fetches the list of users:
import (
miniflux "miniflux.app/v2/client"
)
client := miniflux.NewClient("https://api.example.org", "admin", "secret")
users, err := client.Users()
if err != nil {
fmt.Println(err)
return
}
fmt.Println(users, err)
This one discovers subscriptions on a website:
subscriptions, err := client.Discover("https://example.org/")
if err != nil {
fmt.Println(err)
return
}
fmt.Println(subscriptions)
*/
package client // import "miniflux.app/v2/client"
v2-2.2.6/client/model.go 0000664 0000000 0000000 00000027553 14756465373 0015015 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package client // import "miniflux.app/v2/client"
import (
"fmt"
"time"
)
// Entry statuses.
const (
EntryStatusUnread = "unread"
EntryStatusRead = "read"
EntryStatusRemoved = "removed"
)
// User represents a user in the system.
type User struct {
ID int64 `json:"id"`
Username string `json:"username"`
Password string `json:"password,omitempty"`
IsAdmin bool `json:"is_admin"`
Theme string `json:"theme"`
Language string `json:"language"`
Timezone string `json:"timezone"`
EntryDirection string `json:"entry_sorting_direction"`
EntryOrder string `json:"entry_sorting_order"`
Stylesheet string `json:"stylesheet"`
CustomJS string `json:"custom_js"`
GoogleID string `json:"google_id"`
OpenIDConnectID string `json:"openid_connect_id"`
EntriesPerPage int `json:"entries_per_page"`
KeyboardShortcuts bool `json:"keyboard_shortcuts"`
ShowReadingTime bool `json:"show_reading_time"`
EntrySwipe bool `json:"entry_swipe"`
GestureNav string `json:"gesture_nav"`
LastLoginAt *time.Time `json:"last_login_at"`
DisplayMode string `json:"display_mode"`
DefaultReadingSpeed int `json:"default_reading_speed"`
CJKReadingSpeed int `json:"cjk_reading_speed"`
DefaultHomePage string `json:"default_home_page"`
CategoriesSortingOrder string `json:"categories_sorting_order"`
MarkReadOnView bool `json:"mark_read_on_view"`
MediaPlaybackRate float64 `json:"media_playback_rate"`
BlockFilterEntryRules string `json:"block_filter_entry_rules"`
KeepFilterEntryRules string `json:"keep_filter_entry_rules"`
ExternalFontHosts string `json:"external_font_hosts"`
}
func (u User) String() string {
return fmt.Sprintf("#%d - %s (admin=%v)", u.ID, u.Username, u.IsAdmin)
}
// UserCreationRequest represents the request to create a user.
type UserCreationRequest struct {
Username string `json:"username"`
Password string `json:"password"`
IsAdmin bool `json:"is_admin"`
GoogleID string `json:"google_id"`
OpenIDConnectID string `json:"openid_connect_id"`
}
// UserModificationRequest represents the request to update a user.
type UserModificationRequest struct {
Username *string `json:"username"`
Password *string `json:"password"`
IsAdmin *bool `json:"is_admin"`
Theme *string `json:"theme"`
Language *string `json:"language"`
Timezone *string `json:"timezone"`
EntryDirection *string `json:"entry_sorting_direction"`
EntryOrder *string `json:"entry_sorting_order"`
Stylesheet *string `json:"stylesheet"`
CustomJS *string `json:"custom_js"`
GoogleID *string `json:"google_id"`
OpenIDConnectID *string `json:"openid_connect_id"`
EntriesPerPage *int `json:"entries_per_page"`
KeyboardShortcuts *bool `json:"keyboard_shortcuts"`
ShowReadingTime *bool `json:"show_reading_time"`
EntrySwipe *bool `json:"entry_swipe"`
GestureNav *string `json:"gesture_nav"`
DisplayMode *string `json:"display_mode"`
DefaultReadingSpeed *int `json:"default_reading_speed"`
CJKReadingSpeed *int `json:"cjk_reading_speed"`
DefaultHomePage *string `json:"default_home_page"`
CategoriesSortingOrder *string `json:"categories_sorting_order"`
MarkReadOnView *bool `json:"mark_read_on_view"`
MediaPlaybackRate *float64 `json:"media_playback_rate"`
BlockFilterEntryRules *string `json:"block_filter_entry_rules"`
KeepFilterEntryRules *string `json:"keep_filter_entry_rules"`
ExternalFontHosts *string `json:"external_font_hosts"`
}
// Users represents a list of users.
type Users []User
// Category represents a feed category.
type Category struct {
ID int64 `json:"id,omitempty"`
Title string `json:"title,omitempty"`
UserID int64 `json:"user_id,omitempty"`
}
func (c Category) String() string {
return fmt.Sprintf("#%d %s", c.ID, c.Title)
}
// Categories represents a list of categories.
type Categories []*Category
// Subscription represents a feed subscription.
type Subscription struct {
Title string `json:"title"`
URL string `json:"url"`
Type string `json:"type"`
}
func (s Subscription) String() string {
return fmt.Sprintf(`Title=%q, URL=%q, Type=%q`, s.Title, s.URL, s.Type)
}
// Subscriptions represents a list of subscriptions.
type Subscriptions []*Subscription
// Feed represents a Miniflux feed.
type Feed struct {
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
FeedURL string `json:"feed_url"`
SiteURL string `json:"site_url"`
Title string `json:"title"`
CheckedAt time.Time `json:"checked_at,omitempty"`
EtagHeader string `json:"etag_header,omitempty"`
LastModifiedHeader string `json:"last_modified_header,omitempty"`
ParsingErrorMsg string `json:"parsing_error_message,omitempty"`
ParsingErrorCount int `json:"parsing_error_count,omitempty"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
Crawler bool `json:"crawler"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
Category *Category `json:"category,omitempty"`
HideGlobally bool `json:"hide_globally"`
DisableHTTP2 bool `json:"disable_http2"`
}
// FeedCreationRequest represents the request to create a feed.
type FeedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
HideGlobally bool `json:"hide_globally"`
DisableHTTP2 bool `json:"disable_http2"`
}
// FeedModificationRequest represents the request to update a feed.
type FeedModificationRequest struct {
FeedURL *string `json:"feed_url"`
SiteURL *string `json:"site_url"`
Title *string `json:"title"`
ScraperRules *string `json:"scraper_rules"`
RewriteRules *string `json:"rewrite_rules"`
BlocklistRules *string `json:"blocklist_rules"`
KeeplistRules *string `json:"keeplist_rules"`
Crawler *bool `json:"crawler"`
UserAgent *string `json:"user_agent"`
Cookie *string `json:"cookie"`
Username *string `json:"username"`
Password *string `json:"password"`
CategoryID *int64 `json:"category_id"`
Disabled *bool `json:"disabled"`
IgnoreHTTPCache *bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates *bool `json:"allow_self_signed_certificates"`
FetchViaProxy *bool `json:"fetch_via_proxy"`
HideGlobally *bool `json:"hide_globally"`
DisableHTTP2 *bool `json:"disable_http2"`
}
// FeedIcon represents the feed icon.
type FeedIcon struct {
ID int64 `json:"id"`
MimeType string `json:"mime_type"`
Data string `json:"data"`
}
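// FeedCounters represents the number of read and unread entries per feed ID.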
type FeedCounters struct {
ReadCounters map[int64]int `json:"reads"`
UnreadCounters map[int64]int `json:"unreads"`
}
// Feeds represents a list of feeds.
type Feeds []*Feed
// Entry represents a subscription item in the system.
type Entry struct {
ID int64 `json:"id"`
Date time.Time `json:"published_at"`
ChangedAt time.Time `json:"changed_at"`
CreatedAt time.Time `json:"created_at"`
Feed *Feed `json:"feed,omitempty"`
Hash string `json:"hash"`
URL string `json:"url"`
CommentsURL string `json:"comments_url"`
Title string `json:"title"`
Status string `json:"status"`
Content string `json:"content"`
Author string `json:"author"`
ShareCode string `json:"share_code"`
Enclosures Enclosures `json:"enclosures,omitempty"`
Tags []string `json:"tags"`
ReadingTime int `json:"reading_time"`
UserID int64 `json:"user_id"`
FeedID int64 `json:"feed_id"`
Starred bool `json:"starred"`
}
// EntryModificationRequest represents a request to modify an entry.
type EntryModificationRequest struct {
Title *string `json:"title"`
Content *string `json:"content"`
}
// Entries represents a list of entries.
type Entries []*Entry
// Enclosure represents an attachment.
type Enclosure struct {
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
EntryID int64 `json:"entry_id"`
URL string `json:"url"`
MimeType string `json:"mime_type"`
Size int `json:"size"`
MediaProgression int64 `json:"media_progression"`
}
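// EnclosureUpdateRequest represents a request to update an enclosure.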
type EnclosureUpdateRequest struct {
MediaProgression int64 `json:"media_progression"`
}
// Enclosures represents a list of attachments.
type Enclosures []*Enclosure
const (
FilterNotStarred = "0"
FilterOnlyStarred = "1"
)
// Filter is used to filter entries.
type Filter struct {
Status string
Offset int
Limit int
Order string
Direction string
Starred string
Before int64
After int64
PublishedBefore int64
PublishedAfter int64
ChangedBefore int64
ChangedAfter int64
BeforeEntryID int64
AfterEntryID int64
Search string
CategoryID int64
FeedID int64
Statuses []string
GloballyVisible bool
}
// EntryResultSet represents the response when fetching entries.
type EntryResultSet struct {
Total int `json:"total"`
Entries Entries `json:"entries"`
}
// VersionResponse represents the version and the build information of the Miniflux instance.
type VersionResponse struct {
Version string `json:"version"`
Commit string `json:"commit"`
BuildDate string `json:"build_date"`
GoVersion string `json:"go_version"`
Compiler string `json:"compiler"`
Arch string `json:"arch"`
OS string `json:"os"`
}
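// SetOptionalField returns a pointer to the given value.
// It is a convenience helper for populating the pointer fields of modification requests.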
func SetOptionalField[T any](value T) *T {
return &value
}
v2-2.2.6/client/request.go 0000664 0000000 0000000 00000010013 14756465373 0015364 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package client // import "miniflux.app/v2/client"
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"log"
"net/http"
"net/url"
"time"
)
const (
userAgent = "Miniflux Client Library"
defaultTimeout = 80
)
// List of exposed errors.
var (
ErrNotAuthorized = errors.New("miniflux: unauthorized (bad credentials)")
ErrForbidden = errors.New("miniflux: access forbidden")
ErrServerError = errors.New("miniflux: internal server error")
ErrNotFound = errors.New("miniflux: resource not found")
ErrBadRequest = errors.New("miniflux: bad request")
ErrEmptyEndpoint = errors.New("miniflux: empty endpoint provided")
)
type errorResponse struct {
ErrorMessage string `json:"error_message"`
}
type request struct {
endpoint string
username string
password string
apiKey string
}
func (r *request) Get(path string) (io.ReadCloser, error) {
return r.execute(http.MethodGet, path, nil)
}
func (r *request) Post(path string, data interface{}) (io.ReadCloser, error) {
return r.execute(http.MethodPost, path, data)
}
func (r *request) PostFile(path string, f io.ReadCloser) (io.ReadCloser, error) {
return r.execute(http.MethodPost, path, f)
}
func (r *request) Put(path string, data interface{}) (io.ReadCloser, error) {
return r.execute(http.MethodPut, path, data)
}
func (r *request) Delete(path string) error {
_, err := r.execute(http.MethodDelete, path, nil)
return err
}
func (r *request) execute(method, path string, data interface{}) (io.ReadCloser, error) {
if r.endpoint == "" {
return nil, ErrEmptyEndpoint
}
if r.endpoint[len(r.endpoint)-1:] == "/" {
r.endpoint = r.endpoint[:len(r.endpoint)-1]
}
u, err := url.Parse(r.endpoint + path)
if err != nil {
return nil, err
}
request := &http.Request{
URL: u,
Method: method,
Header: r.buildHeaders(),
}
if r.username != "" && r.password != "" {
request.SetBasicAuth(r.username, r.password)
}
if data != nil {
switch data := data.(type) {
case io.ReadCloser:
request.Body = data
default:
request.Body = io.NopCloser(bytes.NewBuffer(r.toJSON(data)))
}
}
client := r.buildClient()
response, err := client.Do(request)
if err != nil {
return nil, err
}
switch response.StatusCode {
case http.StatusUnauthorized:
response.Body.Close()
return nil, ErrNotAuthorized
case http.StatusForbidden:
response.Body.Close()
return nil, ErrForbidden
case http.StatusInternalServerError:
defer response.Body.Close()
var resp errorResponse
decoder := json.NewDecoder(response.Body)
// If we failed to decode, just return a generic ErrServerError
if err := decoder.Decode(&resp); err != nil {
return nil, ErrServerError
}
return nil, errors.New("miniflux: internal server error: " + resp.ErrorMessage)
case http.StatusNotFound:
response.Body.Close()
return nil, ErrNotFound
case http.StatusNoContent:
response.Body.Close()
return nil, nil
case http.StatusBadRequest:
defer response.Body.Close()
var resp errorResponse
decoder := json.NewDecoder(response.Body)
if err := decoder.Decode(&resp); err != nil {
return nil, fmt.Errorf("%w (%v)", ErrBadRequest, err)
}
return nil, fmt.Errorf("%w (%s)", ErrBadRequest, resp.ErrorMessage)
}
if response.StatusCode > 400 {
response.Body.Close()
return nil, fmt.Errorf("miniflux: status code=%d", response.StatusCode)
}
return response.Body, nil
}
func (r *request) buildClient() http.Client {
return http.Client{
Timeout: defaultTimeout * time.Second,
}
}
func (r *request) buildHeaders() http.Header {
headers := make(http.Header)
headers.Add("User-Agent", userAgent)
headers.Add("Content-Type", "application/json")
headers.Add("Accept", "application/json")
if r.apiKey != "" {
headers.Add("X-Auth-Token", r.apiKey)
}
return headers
}
func (r *request) toJSON(v interface{}) []byte {
b, err := json.Marshal(v)
if err != nil {
log.Println("Unable to convert interface to JSON:", err)
return []byte("")
}
return b
}
v2-2.2.6/contrib/ 0000775 0000000 0000000 00000000000 14756465373 0013534 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/README.md 0000664 0000000 0000000 00000000342 14756465373 0015012 0 ustar 00root root 0000000 0000000 The contrib directory contains various useful things contributed by the community.
Community contributions are not officially supported by the maintainers.
There is no guarantee whatsoever that anything in this folder works.
v2-2.2.6/contrib/ansible/ 0000775 0000000 0000000 00000000000 14756465373 0015151 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/inventories/ 0000775 0000000 0000000 00000000000 14756465373 0017516 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/inventories/group_vars/ 0000775 0000000 0000000 00000000000 14756465373 0021705 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/inventories/group_vars/miniflux_vars.yml 0000664 0000000 0000000 00000000406 14756465373 0025316 0 ustar 00root root 0000000 0000000 ---
miniflux_linux_user: miniflux
miniflux_db_user_name: miniflux_db_user
miniflux_db_user_password: miniflux_db_user_password
miniflux_db: miniflux_db
miniflux_admin_name: admin
miniflux_admin_passwort: miniflux_admin_password
miniflux_port: 8080
v2-2.2.6/contrib/ansible/playbooks/ 0000775 0000000 0000000 00000000000 14756465373 0017154 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/playbooks/playbook.yml 0000664 0000000 0000000 00000000120 14756465373 0021510 0 ustar 00root root 0000000 0000000 ---
- hosts: miniflux
roles:
- { role: mgrote.miniflux, tags: "miniflux" } v2-2.2.6/contrib/ansible/roles/ 0000775 0000000 0000000 00000000000 14756465373 0016275 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/ 0000775 0000000 0000000 00000000000 14756465373 0021424 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/README.md 0000664 0000000 0000000 00000001050 14756465373 0022677 0 ustar 00root root 0000000 0000000 ## mgrote.miniflux
### Details
Installs and configures Miniflux v2 with ansible
### Works on...
- [x] Ubuntu (>=18.04)
### Variables and Defaults
##### Linux User
miniflux_linux_user: miniflux
##### DB User
miniflux_db_user_name: miniflux_db_user
##### DB Password
miniflux_db_user_password: qqqqqqqqqqqqq
##### Database
miniflux_db: miniflux_db
##### Username Miniflux Admin
miniflux_admin_name: admin
##### Password Miniflux Admin
miniflux_admin_passwort: hallowelt
##### Port for Miniflux Frontend
miniflux_port: 8080
v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/defaults/ 0000775 0000000 0000000 00000000000 14756465373 0023233 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/defaults/main.yml 0000664 0000000 0000000 00000000000 14756465373 0024670 0 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/handlers/ 0000775 0000000 0000000 00000000000 14756465373 0023224 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/handlers/main.yml 0000664 0000000 0000000 00000000330 14756465373 0024667 0 ustar 00root root 0000000 0000000 ---
- name: start_miniflux.service
become: yes
systemd:
name: miniflux
state: restarted
enabled: yes
# wait 15 seconds (for systemd)
- name: miniflux_wait
wait_for:
timeout: 15
v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/tasks/ 0000775 0000000 0000000 00000000000 14756465373 0022551 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/tasks/main.yml 0000664 0000000 0000000 00000001630 14756465373 0024220 0 ustar 00root root 0000000 0000000 - name: add Apt-key for miniflux-repo
become: yes
apt_key:
url: https://apt.miniflux.app/KEY.gpg
state: present
- name: add miniflux-repo
become: yes
apt_repository:
repo: 'deb https://apt.miniflux.app/ /'
state: present
filename: miniflux_repo
update_cache: yes
- name: install miniflux
become: yes
apt:
name: miniflux
state: present
- name: add miniflux linux_user
become: yes
user:
name: "{{ miniflux_linux_user }}"
home: "/var/empty"
create_home: "no"
system: "yes"
shell: "/bin/false"
- name: create directory "/etc/miniflux.d"
become: yes
file:
path: /etc/miniflux.d
state: directory
- name: copy miniflux.conf
become: yes
template:
src: "miniflux.conf"
dest: "/etc/miniflux.conf"
notify:
- start_miniflux.service
- miniflux_wait
v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/templates/ 0000775 0000000 0000000 00000000000 14756465373 0023422 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/ansible/roles/mgrote.miniflux/templates/miniflux.conf 0000664 0000000 0000000 00000000732 14756465373 0026126 0 ustar 00root root 0000000 0000000 # See https://docs.miniflux.app/
LISTEN_ADDR=0.0.0.0:{{ miniflux_port }}
DATABASE_URL=user={{ miniflux_db_user_name }} password={{ miniflux_db_user_password }} dbname={{ miniflux_db }} sslmode=disable
POLLING_FREQUENCY=15
PROXY_IMAGES=http-only
# Run SQL migrations automatically:
RUN_MIGRATIONS=1
CREATE_ADMIN=1
ADMIN_USERNAME={{ miniflux_admin_name }}
ADMIN_PASSWORD={{ miniflux_admin_passwort }}
POLLING_FREQUENCY=10
# Options: https://miniflux.app/miniflux.1.html
v2-2.2.6/contrib/bruno/ 0000775 0000000 0000000 00000000000 14756465373 0014661 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/bruno/README.md 0000664 0000000 0000000 00000000314 14756465373 0016136 0 ustar 00root root 0000000 0000000 This folder contains Miniflux API collection for [Bruno](https://www.usebruno.com).
Bruno is a lightweight alternative to Postman/Insomnia.
- https://www.usebruno.com
- https://github.com/usebruno/bruno v2-2.2.6/contrib/bruno/miniflux/ 0000775 0000000 0000000 00000000000 14756465373 0016514 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/bruno/miniflux/Bookmark an entry.bru 0000664 0000000 0000000 00000000530 14756465373 0022472 0 ustar 00root root 0000000 0000000 meta {
name: Bookmark an entry
type: http
seq: 37
}
put {
url: {{minifluxBaseURL}}/v1/entries/{{entryID}}/bookmark
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
vars:pre-request {
entryID: 1698
}
v2-2.2.6/contrib/bruno/miniflux/Create a feed.bru 0000664 0000000 0000000 00000000430 14756465373 0021513 0 ustar 00root root 0000000 0000000 meta {
name: Create a feed
type: http
seq: 19
}
post {
url: {{minifluxBaseURL}}/v1/feeds
body: json
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
v2-2.2.6/contrib/bruno/miniflux/Create a new category.bru 0000664 0000000 0000000 00000000411 14756465373 0023176 0 ustar 00root root 0000000 0000000 meta {
name: Create a new category
type: http
seq: 10
}
post {
url: {{minifluxBaseURL}}/v1/categories
body: json
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "Test"
}
}
v2-2.2.6/contrib/bruno/miniflux/Create a new user.bru 0000664 0000000 0000000 00000000441 14756465373 0022342 0 ustar 00root root 0000000 0000000 meta {
name: Create a new user
type: http
seq: 5
}
post {
url: {{minifluxBaseURL}}/v1/users
body: json
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"username": "foobar",
"password": "secret123"
}
}
v2-2.2.6/contrib/bruno/miniflux/Delete a category.bru 0000664 0000000 0000000 00000000503 14756465373 0022425 0 ustar 00root root 0000000 0000000 meta {
name: Delete a category
type: http
seq: 12
}
delete {
url: {{minifluxBaseURL}}/v1/categories/{{categoryID}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "Test Update"
}
}
vars:pre-request {
categoryID: 1
}
v2-2.2.6/contrib/bruno/miniflux/Delete a feed.bru 0000664 0000000 0000000 00000000472 14756465373 0021520 0 ustar 00root root 0000000 0000000 meta {
name: Delete a feed
type: http
seq: 26
}
delete {
url: {{minifluxBaseURL}}/v1/feeds/{{feedID}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"user_agent": "My user agent"
}
}
vars:pre-request {
feedID: 18
}
v2-2.2.6/contrib/bruno/miniflux/Delete a user.bru 0000664 0000000 0000000 00000000456 14756465373 0021575 0 ustar 00root root 0000000 0000000 meta {
name: Delete a user
type: http
seq: 7
}
delete {
url: {{minifluxBaseURL}}/v1/users/{{userID}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"language": "fr_FR"
}
}
vars:pre-request {
userID: 2
}
v2-2.2.6/contrib/bruno/miniflux/Discover feeds.bru 0000664 0000000 0000000 00000000416 14756465373 0022054 0 ustar 00root root 0000000 0000000 meta {
name: Discover feeds
type: http
seq: 18
}
post {
url: {{minifluxBaseURL}}/v1/discover
body: json
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"url": "https://miniflux.app"
}
}
v2-2.2.6/contrib/bruno/miniflux/Fetch entry website content.bru 0000664 0000000 0000000 00000000547 14756465373 0024465 0 ustar 00root root 0000000 0000000 meta {
name: Fetch entry website content
type: http
seq: 39
}
get {
url: {{minifluxBaseURL}}/v1/entries/{{entryID}}/fetch-content
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
vars:pre-request {
entryID: 1698
}
v2-2.2.6/contrib/bruno/miniflux/Flush history.bru 0000664 0000000 0000000 00000000421 14756465373 0021766 0 ustar 00root root 0000000 0000000 meta {
name: Flush history
type: http
seq: 40
}
put {
url: {{minifluxBaseURL}}/v1/flush-history
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"url": "https://miniflux.app"
}
}
v2-2.2.6/contrib/bruno/miniflux/Get a single entry.bru 0000664 0000000 0000000 00000000520 14756465373 0022527 0 ustar 00root root 0000000 0000000 meta {
name: Get a single entry
type: http
seq: 36
}
get {
url: {{minifluxBaseURL}}/v1/entries/{{entryID}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
vars:pre-request {
entryID: 1698
}
v2-2.2.6/contrib/bruno/miniflux/Get a single feed entry.bru 0000664 0000000 0000000 00000000563 14756465373 0023422 0 ustar 00root root 0000000 0000000 meta {
name: Get a single feed entry
type: http
seq: 33
}
get {
url: {{minifluxBaseURL}}/v1/feeds/{{feedID}}/entries/{{entryID}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
vars:pre-request {
feedID: 19
entryID: 1698
}
v2-2.2.6/contrib/bruno/miniflux/Get a single feed.bru 0000664 0000000 0000000 00000000511 14756465373 0022271 0 ustar 00root root 0000000 0000000 meta {
name: Get a single feed
type: http
seq: 24
}
get {
url: {{minifluxBaseURL}}/v1/feeds/{{feedID}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
vars:pre-request {
feedID: 18
}
v2-2.2.6/contrib/bruno/miniflux/Get a single user by ID.bru 0000664 0000000 0000000 00000000406 14756465373 0023217 0 ustar 00root root 0000000 0000000 meta {
name: Get a single user by ID
type: http
seq: 3
}
get {
url: {{minifluxBaseURL}}/v1/users/{{userID}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
vars:pre-request {
userID: 1
}
v2-2.2.6/contrib/bruno/miniflux/Get a single user by username.bru 0000664 0000000 0000000 00000000424 14756465373 0024542 0 ustar 00root root 0000000 0000000 meta {
name: Get a single user by username
type: http
seq: 4
}
get {
url: {{minifluxBaseURL}}/v1/users/{{username}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
vars:pre-request {
username: admin
}
v2-2.2.6/contrib/bruno/miniflux/Get all categories.bru 0000664 0000000 0000000 00000000331 14756465373 0022601 0 ustar 00root root 0000000 0000000 meta {
name: Get all categories
type: http
seq: 9
}
get {
url: {{minifluxBaseURL}}/v1/categories
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
v2-2.2.6/contrib/bruno/miniflux/Get all entries.bru 0000664 0000000 0000000 00000000433 14756465373 0022130 0 ustar 00root root 0000000 0000000 meta {
name: Get all entries
type: http
seq: 34
}
get {
url: {{minifluxBaseURL}}/v1/entries
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
v2-2.2.6/contrib/bruno/miniflux/Get all feeds.bru 0000664 0000000 0000000 00000000427 14756465373 0021550 0 ustar 00root root 0000000 0000000 meta {
name: Get all feeds
type: http
seq: 20
}
get {
url: {{minifluxBaseURL}}/v1/feeds
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
v2-2.2.6/contrib/bruno/miniflux/Get all users.bru 0000664 0000000 0000000 00000000317 14756465373 0021621 0 ustar 00root root 0000000 0000000 meta {
name: Get all users
type: http
seq: 2
}
get {
url: {{minifluxBaseURL}}/v1/users
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
v2-2.2.6/contrib/bruno/miniflux/Get category entries.bru 0000664 0000000 0000000 00000000513 14756465373 0023174 0 ustar 00root root 0000000 0000000 meta {
name: Get category entries
type: http
seq: 16
}
get {
url: {{minifluxBaseURL}}/v1/categories/{{categoryID}}/entries
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "Test Update"
}
}
vars:pre-request {
categoryID: 2
}
v2-2.2.6/contrib/bruno/miniflux/Get category entry.bru 0000664 0000000 0000000 00000000542 14756465373 0022666 0 ustar 00root root 0000000 0000000 meta {
name: Get category entry
type: http
seq: 17
}
get {
url: {{minifluxBaseURL}}/v1/categories/{{categoryID}}/entries/{{entryID}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "Test Update"
}
}
vars:pre-request {
categoryID: 2
entryID: 1
}
v2-2.2.6/contrib/bruno/miniflux/Get category feeds.bru 0000664 0000000 0000000 00000000507 14756465373 0022614 0 ustar 00root root 0000000 0000000 meta {
name: Get category feeds
type: http
seq: 14
}
get {
url: {{minifluxBaseURL}}/v1/categories/{{categoryID}}/feeds
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "Test Update"
}
}
vars:pre-request {
categoryID: 2
}
v2-2.2.6/contrib/bruno/miniflux/Get current user.bru 0000664 0000000 0000000 00000000317 14756465373 0022350 0 ustar 00root root 0000000 0000000 meta {
name: Get current user
type: http
seq: 1
}
get {
url: {{minifluxBaseURL}}/v1/me
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
v2-2.2.6/contrib/bruno/miniflux/Get feed counters.bru 0000664 0000000 0000000 00000000444 14756465373 0022456 0 ustar 00root root 0000000 0000000 meta {
name: Get feed counters
type: http
seq: 21
}
get {
url: {{minifluxBaseURL}}/v1/feeds/counters
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
v2-2.2.6/contrib/bruno/miniflux/Get feed entries.bru 0000664 0000000 0000000 00000000520 14756465373 0022260 0 ustar 00root root 0000000 0000000 meta {
name: Get feed entries
type: http
seq: 32
}
get {
url: {{minifluxBaseURL}}/v1/feeds/{{feedID}}/entries
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
vars:pre-request {
feedID: 19
}
v2-2.2.6/contrib/bruno/miniflux/Get feed icon by feed ID.bru 0000664 0000000 0000000 00000000507 14756465373 0023300 0 ustar 00root root 0000000 0000000 meta {
name: Get feed icon by feed ID
type: http
seq: 27
}
get {
url: {{minifluxBaseURL}}/v1/feeds/{{feedID}}/icon
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"user_agent": "My user agent"
}
}
vars:pre-request {
feedID: 19
}
v2-2.2.6/contrib/bruno/miniflux/Get feed icon by icon ID.bru 0000664 0000000 0000000 00000000502 14756465373 0023320 0 ustar 00root root 0000000 0000000 meta {
name: Get feed icon by icon ID
type: http
seq: 28
}
get {
url: {{minifluxBaseURL}}/v1/icons/{{iconID}}
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"user_agent": "My user agent"
}
}
vars:pre-request {
iconID: 11
}
v2-2.2.6/contrib/bruno/miniflux/Get version and build information.bru 0000664 0000000 0000000 00000000346 14756465373 0025527 0 ustar 00root root 0000000 0000000 meta {
name: Get version and build information
type: http
seq: 42
}
get {
url: {{minifluxBaseURL}}/v1/version
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
v2-2.2.6/contrib/bruno/miniflux/Mark all category entries as read.bru 0000664 0000000 0000000 00000000541 14756465373 0025361 0 ustar 00root root 0000000 0000000 meta {
name: Mark all category entries as read
type: http
seq: 13
}
put {
url: {{minifluxBaseURL}}/v1/categories/{{categoryID}}/mark-all-as-read
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "Test Update"
}
}
vars:pre-request {
categoryID: 2
}
v2-2.2.6/contrib/bruno/miniflux/Mark all user entries as read.bru 0000664 0000000 0000000 00000000517 14756465373 0024525 0 ustar 00root root 0000000 0000000 meta {
name: Mark all user entries as read
type: http
seq: 8
}
put {
url: {{minifluxBaseURL}}/v1/users/{{userID}}/mark-all-as-read
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "Test Update"
}
}
vars:pre-request {
userID: 1
}
v2-2.2.6/contrib/bruno/miniflux/Mark feed as read.bru 0000664 0000000 0000000 00000000514 14756465373 0022264 0 ustar 00root root 0000000 0000000 meta {
name: Mark feed as read
type: http
seq: 29
}
put {
url: {{minifluxBaseURL}}/v1/feeds/{{feedID}}/mark-all-as-read
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"user_agent": "My user agent"
}
}
vars:pre-request {
feedID: 19
}
v2-2.2.6/contrib/bruno/miniflux/OPML Export.bru 0000664 0000000 0000000 00000000453 14756465373 0021241 0 ustar 00root root 0000000 0000000 meta {
name: OPML Export
type: http
seq: 30
}
get {
url: {{minifluxBaseURL}}/v1/export
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"user_agent": "My user agent"
}
}
vars:pre-request {
feedID: 19
}
v2-2.2.6/contrib/bruno/miniflux/OPML Import.bru 0000664 0000000 0000000 00000001240 14756465373 0021225 0 ustar 00root root 0000000 0000000 meta {
name: OPML Import
type: http
seq: 31
}
post {
url: {{minifluxBaseURL}}/v1/import
body: xml
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"user_agent": "My user agent"
}
}
body:xml {
Miniflux
}
vars:pre-request {
feedID: 19
}
v2-2.2.6/contrib/bruno/miniflux/Refresh a single feed.bru 0000664 0000000 0000000 00000000525 14756465373 0023155 0 ustar 00root root 0000000 0000000 meta {
name: Refresh a single feed
type: http
seq: 23
}
put {
url: {{minifluxBaseURL}}/v1/feeds/{{feedID}}/refresh
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
vars:pre-request {
feedID: 18
}
v2-2.2.6/contrib/bruno/miniflux/Refresh all feeds.bru 0000664 0000000 0000000 00000000443 14756465373 0022425 0 ustar 00root root 0000000 0000000 meta {
name: Refresh all feeds
type: http
seq: 22
}
put {
url: {{minifluxBaseURL}}/v1/feeds/refresh
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
v2-2.2.6/contrib/bruno/miniflux/Refresh category feeds.bru 0000664 0000000 0000000 00000000515 14756465373 0023472 0 ustar 00root root 0000000 0000000 meta {
name: Refresh category feeds
type: http
seq: 15
}
put {
url: {{minifluxBaseURL}}/v1/categories/{{categoryID}}/refresh
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "Test Update"
}
}
vars:pre-request {
categoryID: 2
}
v2-2.2.6/contrib/bruno/miniflux/Save an entry.bru 0000664 0000000 0000000 00000000521 14756465373 0021623 0 ustar 00root root 0000000 0000000 meta {
name: Save an entry
type: http
seq: 38
}
post {
url: {{minifluxBaseURL}}/v1/entries/{{entryID}}/save
body: none
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"feed_url": "https://miniflux.app/feed.xml"
}
}
vars:pre-request {
entryID: 1698
}
v2-2.2.6/contrib/bruno/miniflux/Update a category.bru 0000664 0000000 0000000 00000000500 14756465373 0022442 0 ustar 00root root 0000000 0000000 meta {
name: Update a category
type: http
seq: 11
}
put {
url: {{minifluxBaseURL}}/v1/categories/{{categoryID}}
body: json
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "Test Update"
}
}
vars:pre-request {
categoryID: 1
}
v2-2.2.6/contrib/bruno/miniflux/Update a feed.bru 0000664 0000000 0000000 00000000467 14756465373 0021544 0 ustar 00root root 0000000 0000000 meta {
name: Update a feed
type: http
seq: 25
}
put {
url: {{minifluxBaseURL}}/v1/feeds/{{feedID}}
body: json
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"user_agent": "My user agent"
}
}
vars:pre-request {
feedID: 18
}
v2-2.2.6/contrib/bruno/miniflux/Update a user.bru 0000664 0000000 0000000 00000000453 14756465373 0021612 0 ustar 00root root 0000000 0000000 meta {
name: Update a user
type: http
seq: 6
}
put {
url: {{minifluxBaseURL}}/v1/users/{{userID}}
body: json
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"language": "fr_FR"
}
}
vars:pre-request {
userID: 1
}
v2-2.2.6/contrib/bruno/miniflux/Update entries status.bru 0000664 0000000 0000000 00000000445 14756465373 0023411 0 ustar 00root root 0000000 0000000 meta {
name: Update entries status
type: http
seq: 35
}
put {
url: {{minifluxBaseURL}}/v1/entries
body: json
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"entry_ids": [1698, 1699],
"status": "read"
}
}
v2-2.2.6/contrib/bruno/miniflux/Update entry.bru 0000664 0000000 0000000 00000000517 14756465373 0021575 0 ustar 00root root 0000000 0000000 meta {
name: Update entry
type: http
seq: 41
}
put {
url: {{minifluxBaseURL}}/v1/entries/{{entryID}}
body: json
auth: basic
}
auth:basic {
username: {{minifluxUsername}}
password: {{minifluxPassword}}
}
body:json {
{
"title": "New title",
"content": "Some text"
}
}
vars:pre-request {
entryID: 1789
}
v2-2.2.6/contrib/bruno/miniflux/bruno.json 0000664 0000000 0000000 00000000102 14756465373 0020525 0 ustar 00root root 0000000 0000000 {
"version": "1",
"name": "Miniflux",
"type": "collection"
} v2-2.2.6/contrib/bruno/miniflux/environments/ 0000775 0000000 0000000 00000000000 14756465373 0021243 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/bruno/miniflux/environments/Local.bru 0000664 0000000 0000000 00000000157 14756465373 0023012 0 ustar 00root root 0000000 0000000 vars {
minifluxBaseURL: http://127.0.0.1:8080
minifluxUsername: admin
}
vars:secret [
minifluxPassword
]
v2-2.2.6/contrib/docker-compose/ 0000775 0000000 0000000 00000000000 14756465373 0016446 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/docker-compose/Caddyfile 0000664 0000000 0000000 00000000061 14756465373 0020252 0 ustar 00root root 0000000 0000000 miniflux.example.org
reverse_proxy miniflux:8080
v2-2.2.6/contrib/docker-compose/README.md 0000664 0000000 0000000 00000000446 14756465373 0017731 0 ustar 00root root 0000000 0000000 Docker-Compose Examples
=======================
Here are a few Docker Compose examples:
- `basic.yml`: Basic example
- `caddy.yml`: Use Caddy as a reverse proxy with automatic HTTPS
- `traefik.yml`: Use Traefik as a reverse proxy with automatic HTTPS
```bash
docker compose -f basic.yml up -d
```
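All three compose files read the Miniflux image tag from the `MINIFLUX_IMAGE` variable and fall back to `miniflux/miniflux:latest`, so a specific release can be pinned without editing the YAML. A minimal sketch, assuming the commands are run from this folder (the tag below is only an example):
```bash
# Pin a specific Miniflux image instead of :latest (example tag).
export MINIFLUX_IMAGE=miniflux/miniflux:2.2.6

# Start the Caddy-based stack in the background, follow its logs,
# and tear it down again when finished.
docker compose -f caddy.yml up -d
docker compose -f caddy.yml logs -f miniflux
docker compose -f caddy.yml down
```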
v2-2.2.6/contrib/docker-compose/basic.yml 0000664 0000000 0000000 00000001616 14756465373 0020256 0 ustar 00root root 0000000 0000000 services:
miniflux:
image: ${MINIFLUX_IMAGE:-miniflux/miniflux:latest}
container_name: miniflux
restart: always
ports:
- "80:8080"
depends_on:
db:
condition: service_healthy
environment:
- DATABASE_URL=postgres://miniflux:secret@db/miniflux?sslmode=disable
- RUN_MIGRATIONS=1
- CREATE_ADMIN=1
- ADMIN_USERNAME=admin
- ADMIN_PASSWORD=test123
- DEBUG=1
# Optional health check:
# healthcheck:
# test: ["CMD", "/usr/bin/miniflux", "-healthcheck", "auto"]
db:
image: postgres:15
container_name: postgres
environment:
- POSTGRES_USER=miniflux
- POSTGRES_PASSWORD=secret
- POSTGRES_DB=miniflux
volumes:
- miniflux-db:/var/lib/postgresql/data
healthcheck:
test: ["CMD", "pg_isready", "-U", "miniflux"]
interval: 10s
start_period: 30s
volumes:
miniflux-db:
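A quick smoke test for `basic.yml`, assuming `curl` is available on the host: the stack maps host port 80 to Miniflux on 8080 and bootstraps the `admin`/`test123` account via `CREATE_ADMIN=1`, so the API should answer over HTTP Basic Auth once PostgreSQL reports healthy.
```bash
# Bring up the basic stack; the miniflux service waits for the db healthcheck.
docker compose -f basic.yml up -d

# Fetch the current user through the API (Basic Auth with the bootstrap admin).
curl -u admin:test123 http://127.0.0.1/v1/me
```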
v2-2.2.6/contrib/docker-compose/caddy.yml 0000664 0000000 0000000 00000001775 14756465373 0020267 0 ustar 00root root 0000000 0000000 services:
caddy:
image: caddy:2
container_name: caddy
depends_on:
- miniflux
ports:
- "80:80"
- "443:443"
volumes:
- $PWD/Caddyfile:/etc/caddy/Caddyfile
- caddy_data:/data
- caddy_config:/config
miniflux:
image: ${MINIFLUX_IMAGE:-miniflux/miniflux:latest}
container_name: miniflux
depends_on:
db:
condition: service_healthy
environment:
- DATABASE_URL=postgres://miniflux:secret@db/miniflux?sslmode=disable
- RUN_MIGRATIONS=1
- CREATE_ADMIN=1
- ADMIN_USERNAME=admin
- ADMIN_PASSWORD=test123
- BASE_URL=https://miniflux.example.org
db:
image: postgres:15
container_name: postgres
environment:
- POSTGRES_USER=miniflux
- POSTGRES_PASSWORD=secret
volumes:
- miniflux-db:/var/lib/postgresql/data
healthcheck:
test: ["CMD", "pg_isready", "-U", "miniflux"]
interval: 10s
start_period: 30s
volumes:
miniflux-db:
caddy_data:
caddy_config:
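`caddy.yml` bind-mounts `$PWD/Caddyfile` (shown earlier in this folder) and sets `BASE_URL` to the same hostname, so both must point at a domain you actually control before Caddy can obtain a certificate. A minimal sketch, with `miniflux.example.org` as a placeholder:
```bash
# Write a Caddyfile for your own domain in the directory you run compose from;
# the hostname here must match BASE_URL in caddy.yml.
cat > Caddyfile <<'EOF'
miniflux.example.org
reverse_proxy miniflux:8080
EOF

docker compose -f caddy.yml up -d
```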
v2-2.2.6/contrib/docker-compose/traefik.yml 0000664 0000000 0000000 00000003133 14756465373 0020616 0 ustar 00root root 0000000 0000000 services:
traefik:
image: "traefik:v2.3"
container_name: traefik
command:
- "--providers.docker=true"
- "--providers.docker.exposedbydefault=false"
- "--entrypoints.websecure.address=:443"
- "--certificatesresolvers.myresolver.acme.tlschallenge=true"
- "--certificatesresolvers.myresolver.acme.email=postmaster@example.com"
- "--certificatesresolvers.myresolver.acme.storage=/letsencrypt/acme.json"
depends_on:
- miniflux
ports:
- "443:443"
volumes:
- "./letsencrypt:/letsencrypt"
- "/var/run/docker.sock:/var/run/docker.sock:ro"
miniflux:
image: ${MINIFLUX_IMAGE:-miniflux/miniflux:latest}
container_name: miniflux
depends_on:
db:
condition: service_healthy
expose:
- "8080"
environment:
- DATABASE_URL=postgres://miniflux:secret@db/miniflux?sslmode=disable
- RUN_MIGRATIONS=1
- CREATE_ADMIN=1
- ADMIN_USERNAME=admin
- ADMIN_PASSWORD=test123
- BASE_URL=https://miniflux.example.org
labels:
- "traefik.enable=true"
- "traefik.http.routers.miniflux.rule=Host(`miniflux.example.org`)"
- "traefik.http.routers.miniflux.entrypoints=websecure"
- "traefik.http.routers.miniflux.tls.certresolver=myresolver"
db:
image: postgres:15
container_name: postgres
environment:
- POSTGRES_USER=miniflux
- POSTGRES_PASSWORD=secret
volumes:
- miniflux-db:/var/lib/postgresql/data
healthcheck:
test: ["CMD", "pg_isready", "-U", "miniflux"]
interval: 10s
start_period: 30s
volumes:
miniflux-db:
v2-2.2.6/contrib/grafana/ 0000775 0000000 0000000 00000000000 14756465373 0015133 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/grafana/README.md 0000664 0000000 0000000 00000000037 14756465373 0016412 0 ustar 00root root 0000000 0000000 Grafana Dashboard for Miniflux
v2-2.2.6/contrib/grafana/dashboard.json 0000664 0000000 0000000 00000115501 14756465373 0017760 0 ustar 00root root 0000000 0000000 {
"__inputs": [
{
"name": "DS_PROMETHEUS",
"label": "prometheus",
"description": "",
"type": "datasource",
"pluginId": "prometheus",
"pluginName": "Prometheus"
}
],
"__elements": {},
"__requires": [
{
"type": "panel",
"id": "bargauge",
"name": "Bar gauge",
"version": ""
},
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "10.4.3"
},
{
"type": "datasource",
"id": "prometheus",
"name": "Prometheus",
"version": "1.0.0"
},
{
"type": "panel",
"id": "stat",
"name": "Stat",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"panels": [
{
"collapsed": false,
"datasource": {
"uid": "Prometheus"
},
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 0
},
"id": 24,
"panels": [],
"targets": [
{
"datasource": {
"uid": "Prometheus"
},
"refId": "A"
}
],
"title": "Application",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 8,
"x": 0,
"y": 1
},
"id": 18,
"options": {
"displayMode": "basic",
"maxVizHeight": 300,
"minVizHeight": 16,
"minVizWidth": 8,
"namePlacement": "auto",
"orientation": "horizontal",
"reduceOptions": {
"calcs": [
"last"
],
"fields": "",
"values": false
},
"showUnfilled": true,
"sizing": "auto",
"valueMode": "color"
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "max(miniflux_feeds{status=\"total\"})",
"hide": false,
"interval": "",
"legendFormat": "Total",
"refId": "D"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "max(miniflux_feeds{status=\"enabled\"})",
"hide": false,
"interval": "",
"legendFormat": "Enabled",
"refId": "C"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "max(miniflux_broken_feeds)",
"interval": "",
"legendFormat": "Broken",
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "max(miniflux_feeds{status=\"disabled\"})",
"interval": "",
"legendFormat": "Disabled",
"refId": "B"
}
],
"title": "Feeds",
"type": "bargauge"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 3,
"w": 4,
"x": 8,
"y": 1
},
"id": 2,
"options": {
"colorMode": "value",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "horizontal",
"reduceOptions": {
"calcs": [
"last"
],
"fields": "",
"values": false
},
"showPercentChange": false,
"textMode": "auto",
"wideLayout": true
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "max(miniflux_users)",
"interval": "",
"legendFormat": "Users",
"refId": "A"
}
],
"title": "Users",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 50,
"gradientMode": "opacity",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 12,
"x": 12,
"y": 1
},
"id": 4,
"options": {
"legend": {
"calcs": [
"lastNotNull"
],
"displayMode": "table",
"placement": "right",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "max(miniflux_entries{status=\"total\"})",
"hide": false,
"interval": "",
"legendFormat": "Total",
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "max(miniflux_entries{status=\"unread\"})",
"hide": false,
"interval": "",
"legendFormat": "Unread",
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "max(miniflux_entries{status=\"read\"})",
"interval": "",
"legendFormat": "Read",
"refId": "C"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "max(miniflux_entries{status=\"removed\"})",
"interval": "",
"legendFormat": "Removed",
"refId": "D"
}
],
"title": "Entries by Status",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"description": "",
"fieldConfig": {
"defaults": {
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
}
]
},
"unit": "decbytes"
},
"overrides": []
},
"gridPos": {
"h": 4,
"w": 4,
"x": 8,
"y": 4
},
"id": 36,
"options": {
"colorMode": "value",
"graphMode": "none",
"justifyMode": "center",
"orientation": "vertical",
"reduceOptions": {
"calcs": [
"last"
],
"fields": "",
"values": false
},
"showPercentChange": false,
"textMode": "value",
"wideLayout": true
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_sys_bytes{job=\"miniflux\"}",
"interval": "",
"legendFormat": "{{ instance }} - Memory Used",
"refId": "A"
}
],
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 8
},
"id": 22,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "histogram_quantile(0.95, sum(rate(miniflux_scraper_request_duration_bucket[5m])) by (le))",
"interval": "",
"legendFormat": "Request Duration",
"refId": "A"
}
],
"title": "Scraper Request Duration",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 8
},
"id": 20,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "histogram_quantile(0.95, sum(rate(miniflux_background_feed_refresh_duration_bucket[5m])) by (le))",
"interval": "",
"legendFormat": "Refresh Duration",
"refId": "A"
}
],
"title": "Background Feed Refresh Duration",
"type": "timeseries"
},
{
"collapsed": false,
"datasource": {
"uid": "Prometheus"
},
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 16
},
"id": 28,
"panels": [],
"targets": [
{
"datasource": {
"uid": "Prometheus"
},
"refId": "A"
}
],
"title": "Process",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "decbytes"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 17
},
"id": 16,
"options": {
"legend": {
"calcs": [
"lastNotNull"
],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_sys_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{ instance }}",
"refId": "A"
}
],
"title": "Total Used Memory",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 17
},
"id": 6,
"options": {
"legend": {
"calcs": [
"mean",
"lastNotNull",
"max",
"min"
],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "process_open_fds{job=\"miniflux\"}",
"interval": "",
"legendFormat": "{{instance }} - Open File Descriptors",
"refId": "A"
}
],
"title": "File Descriptors",
"type": "timeseries"
},
{
"collapsed": false,
"datasource": {
"uid": "Prometheus"
},
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 25
},
"id": 26,
"panels": [],
"targets": [
{
"datasource": {
"uid": "Prometheus"
},
"refId": "A"
}
],
"title": "Go Metrics",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": true,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
},
"unit": "bytes"
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "alloc rate"
},
"properties": [
{
"id": "unit",
"value": "Bps"
}
]
}
]
},
"gridPos": {
"h": 7,
"w": 12,
"x": 0,
"y": 26
},
"id": 12,
"options": {
"legend": {
"calcs": [
"mean",
"lastNotNull",
"max"
],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_alloc_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 2,
"legendFormat": "bytes allocated",
"metric": "go_memstats_alloc_bytes",
"refId": "A",
"step": 4
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "rate(go_memstats_alloc_bytes_total{job=\"miniflux\"}[30s])",
"format": "time_series",
"interval": "",
"intervalFactor": 2,
"legendFormat": "alloc rate",
"metric": "go_memstats_alloc_bytes_total",
"refId": "B",
"step": 4
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_stack_inuse_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 2,
"legendFormat": "stack inuse",
"metric": "go_memstats_stack_inuse_bytes",
"refId": "C",
"step": 4
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_heap_inuse_bytes{job=\"miniflux\"}",
"format": "time_series",
"hide": false,
"interval": "",
"intervalFactor": 2,
"legendFormat": "heap inuse",
"metric": "go_memstats_heap_inuse_bytes",
"refId": "D",
"step": 4
}
],
"title": "Golang Memory",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 12,
"x": 12,
"y": 26
},
"id": 8,
"options": {
"legend": {
"calcs": [
"mean",
"lastNotNull",
"max",
"min"
],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_goroutines{job=\"miniflux\"}",
"interval": "",
"legendFormat": "{{ instance }} - Goroutines",
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_threads{job=\"miniflux\"}",
"interval": "",
"legendFormat": "{{ instance }} - OS threads",
"refId": "B"
}
],
"title": "Concurrency",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
},
"unit": "decbytes"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 33
},
"id": 34,
"options": {
"legend": {
"calcs": [
"lastNotNull"
],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_stack_inuse_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{ instance }} - stack_inuse",
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_stack_sys_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{ instance }} - stack_sys",
"refId": "B"
}
],
"title": "Memory in Stack",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
},
"unit": "decbytes"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 33
},
"id": 32,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_heap_alloc_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{ instance }} - heap_alloc",
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_heap_sys_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{ instance }} - heap_sys",
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_heap_idle_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{ instance }} - heap_idle",
"refId": "C"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_heap_inuse_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{ instance }} - heap_inuse",
"refId": "D"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_heap_released_bytes{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{ instance }} - heap_released",
"refId": "E"
}
],
"title": "Memory in Heap",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": true,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 41
},
"id": 14,
"options": {
"legend": {
"calcs": [
"mean",
"lastNotNull",
"max"
],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_gc_duration_seconds{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 2,
"legendFormat": "{{instance}}: {{quantile}}",
"metric": "go_gc_duration_seconds",
"refId": "A",
"step": 4
}
],
"title": "GC Duration Quantiles",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 41
},
"id": 30,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "10.4.3",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "go_memstats_mallocs_total{job=\"miniflux\"} - go_memstats_frees_total{job=\"miniflux\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{ instance }}",
"refId": "A"
}
],
"title": "Number of Live Objects",
"type": "timeseries"
}
],
"refresh": "30s",
"schemaVersion": 39,
"tags": [],
"templating": {
"list": [
{
"current": {
"selected": false,
"text": "prometheus",
"value": "354cc25c-f240-4f6f-a2a9-2d68c22df64e"
},
"hide": 0,
"includeAll": false,
"label": "Datasource",
"multi": false,
"name": "DS_PROMETHEUS",
"options": [],
"query": "prometheus",
"queryValue": "",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"type": "datasource"
}
]
},
"time": {
"from": "now-24h",
"to": "now"
},
"timepicker": {
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
]
},
"timezone": "",
"title": "Miniflux",
"uid": "vSaPgcFMk",
"version": 3,
"weekStart": ""
} v2-2.2.6/contrib/sysvinit/ 0000775 0000000 0000000 00000000000 14756465373 0015424 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/sysvinit/README.md 0000664 0000000 0000000 00000000202 14756465373 0016675 0 ustar 00root root 0000000 0000000
System V init script for systems that do not use systemd, e.g. Devuan (http://devuan.org).
Assumes the Miniflux binary is installed as an executable at `/usr/local/bin/miniflux`.
Configure it via `/etc/default/miniflux`; a template is provided in `etc/default/miniflux`, and a filled-in example plus an installation sketch follow the files below.
v2-2.2.6/contrib/sysvinit/etc/ 0000775 0000000 0000000 00000000000 14756465373 0016177 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/sysvinit/etc/default/ 0000775 0000000 0000000 00000000000 14756465373 0017623 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/sysvinit/etc/default/miniflux 0000664 0000000 0000000 00000000516 14756465373 0021403 0 ustar 00root root 0000000 0000000 # sourced by /etc/init.d/miniflux
# see cluster port in pg_lsclusters and ls -Al /var/run/postgresql/
export DATABASE_URL='host=/var/run/postgresql/ port=5433 user=miniflux password= dbname=miniflux sslmode=disable'
export LISTEN_ADDR='127.0.0.1:8081'
export BASE_URL='https:// and path/'
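The values above are placeholders. A filled-in sketch, assuming a PostgreSQL cluster listening on the default port 5432 and Miniflux published under a path on an HTTPS site (both values are examples, not requirements):
```bash
# Hypothetical /etc/default/miniflux; adjust the cluster port (see pg_lsclusters)
# and the public URL to match your setup.
export DATABASE_URL='host=/var/run/postgresql/ port=5432 user=miniflux password= dbname=miniflux sslmode=disable'
export LISTEN_ADDR='127.0.0.1:8081'
export BASE_URL='https://example.org/miniflux/'
```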
v2-2.2.6/contrib/sysvinit/etc/init.d/ 0000775 0000000 0000000 00000000000 14756465373 0017364 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/sysvinit/etc/init.d/miniflux 0000775 0000000 0000000 00000006230 14756465373 0021146 0 ustar 00root root 0000000 0000000 #! /bin/sh
### BEGIN INIT INFO
# Provides: miniflux
# Required-Start: $syslog $network
# Required-Stop: $syslog
# Should-Start: postgresql
# Should-Stop: postgresql
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: An RSS reader
# Description: An RSS reader
### END INIT INFO
# Author: Danny Boisvert
# Do NOT "set -e"
# PATH should only include /usr/* if it runs after the mountnfs.sh script
PATH=/sbin:/usr/sbin:/bin:/usr/bin
DESC="Miniflux"
NAME=miniflux
SERVICEVERBOSE=yes
PIDFILE=/var/run/$NAME.pid
SCRIPTNAME=/etc/init.d/$NAME
WORKINGDIR=/usr/local/bin
DAEMON=$WORKINGDIR/$NAME
DAEMON_ARGS=""
USER=nobody
# Read configuration variable file if it is present
[ -r /etc/default/$NAME ] && . /etc/default/$NAME
# Exit if the package is not installed
[ -x "$DAEMON" ] || exit 0
# Load the VERBOSE setting and other rcS variables
. /lib/init/vars.sh
# Define LSB log_* functions.
# Depend on lsb-base (>= 3.2-14) to ensure that this file is present
# and status_of_proc is working.
. /lib/lsb/init-functions
#
# Function that starts the daemon/service
#
do_start()
{
# Return
# 0 if daemon has been started
# 1 if daemon was already running
# 2 if daemon could not be started
sh -c "USER=$USER start-stop-daemon --start --quiet --pidfile $PIDFILE --make-pidfile \\
--test --chdir $WORKINGDIR --chuid $USER \\
--exec $DAEMON -- $DAEMON_ARGS > /dev/null \\
|| return 1"
sh -c "USER=$USER start-stop-daemon --start --quiet --pidfile $PIDFILE --make-pidfile \\
--background --chdir $WORKINGDIR --chuid $USER \\
--exec $DAEMON -- $DAEMON_ARGS \\
|| return 2"
}
#
# Function that stops the daemon/service
#
do_stop()
{
# Return
# 0 if daemon has been stopped
# 1 if daemon was already stopped
# 2 if daemon could not be stopped
# other if a failure occurred
start-stop-daemon --stop --quiet --retry=TERM/1/KILL/5 --pidfile $PIDFILE --name $NAME
RETVAL="$?"
[ "$RETVAL" = 2 ] && return 2
start-stop-daemon --stop --quiet --oknodo --retry=0/1/KILL/5 --exec $DAEMON
[ "$?" = 2 ] && return 2
# Many daemons don't delete their pidfiles when they exit.
rm -f $PIDFILE
return "$RETVAL"
}
case "$1" in
start)
[ "$SERVICEVERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME"
do_start
case "$?" in
0|1) [ "$SERVICEVERBOSE" != no ] && log_end_msg 0 ;;
2) [ "$SERVICEVERBOSE" != no ] && log_end_msg 1 ;;
esac
;;
stop)
[ "$SERVICEVERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME"
do_stop
case "$?" in
0|1) [ "$SERVICEVERBOSE" != no ] && log_end_msg 0 ;;
2) [ "$SERVICEVERBOSE" != no ] && log_end_msg 1 ;;
esac
;;
status)
status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $?
;;
restart|force-reload)
log_daemon_msg "Restarting $DESC" "$NAME"
do_stop
case "$?" in
0|1)
do_start
case "$?" in
0) log_end_msg 0 ;;
1) log_end_msg 1 ;; # Old process is still running
*) log_end_msg 1 ;; # Failed to start
esac
;;
*)
# Failed to stop
log_end_msg 1
;;
esac
;;
*)
echo "Usage: $SCRIPTNAME {start|stop|status|restart|force-reload}" >&2
exit 3
;;
esac
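A sketch of installing and driving the script on a sysvinit system, assuming the Debian-style `update-rc.d` and `service` helpers are available (source paths are relative to this contrib folder):
```bash
# Install the init script and its defaults file, then register and start it.
install -m 755 etc/init.d/miniflux /etc/init.d/miniflux
install -m 644 etc/default/miniflux /etc/default/miniflux
update-rc.d miniflux defaults
service miniflux start
service miniflux status
```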
v2-2.2.6/contrib/thunder_client/ 0000775 0000000 0000000 00000000000 14756465373 0016543 5 ustar 00root root 0000000 0000000 v2-2.2.6/contrib/thunder_client/README.md 0000664 0000000 0000000 00000000451 14756465373 0020022 0 ustar 00root root 0000000 0000000 Miniflux API Collection for Thunder Client VS Code Extension
============================================================
Official website: https://www.thunderclient.com
This folder contains the Miniflux API endpoint collection. Import it into Thunder Client to interact with the Miniflux API; the sketch below shows the same kind of requests made with curl.
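The requests in `collection.json` use relative URLs such as `/v1/feeds`, so the base URL and credentials must be supplied by a Thunder Client environment. Outside the extension, the same endpoints can be exercised with curl; a minimal sketch, where the base URL and the `admin`/`test123` credentials are placeholders:
```bash
BASE_URL=http://127.0.0.1:8080

# List feeds, then discover subscriptions for a site, mirroring two requests
# from the collection (HTTP Basic Auth against the Miniflux API).
curl -u admin:test123 "$BASE_URL/v1/feeds"
curl -u admin:test123 -X POST "$BASE_URL/v1/discover" \
  -H 'Content-Type: application/json' \
  -d '{"url": "https://miniflux.app/"}'
```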
v2-2.2.6/contrib/thunder_client/collection.json 0000664 0000000 0000000 00000062715 14756465373 0021604 0 ustar 00root root 0000000 0000000 {
"client": "Thunder Client",
"collectionName": "Miniflux v2",
"dateExported": "2023-07-31T01:53:38.743Z",
"version": "1.1",
"folders": [],
"requests": [
{
"_id": "d23fb9ba-c0c1-46ff-93f4-c5ed24ecd56e",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Discover Subscriptions",
"url": "/v1/discover",
"method": "POST",
"sortNum": 20000,
"created": "2023-07-31T01:20:12.275Z",
"modified": "2023-07-31T01:29:39.751Z",
"headers": [],
"params": [],
"body": {
"type": "json",
"raw": "\n{\n \"url\": \"https://miniflux.app/\"\n}",
"form": []
},
"tests": []
},
{
"_id": "29cfc679-31d4-4d8c-b843-ab92a74dfa85",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Feeds",
"url": "/v1/feeds",
"method": "GET",
"sortNum": 50000,
"created": "2023-07-31T01:20:12.276Z",
"modified": "2023-07-31T01:20:12.276Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "52a88df8-41c7-47c2-a635-8c93d7d29f40",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Category Feeds",
"url": "/v1/categories/1/feeds",
"method": "GET",
"sortNum": 60000,
"created": "2023-07-31T01:20:12.277Z",
"modified": "2023-07-31T01:20:12.277Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "a5c2cb48-a4cf-4edc-a0e0-927d9f711843",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Feed",
"url": "/v1/feeds/{feedID}",
"method": "GET",
"sortNum": 70000,
"created": "2023-07-31T01:20:12.279Z",
"modified": "2023-07-31T01:31:11.478Z",
"headers": [],
"params": [
{
"name": "feedID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "fb55b058-c2ba-4785-be92-a98f0596e86e",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Feed Icon ",
"url": "/v1/feeds/{feedID}/icon",
"method": "GET",
"sortNum": 80000,
"created": "2023-07-31T01:20:12.280Z",
"modified": "2023-07-31T01:31:18.174Z",
"headers": [],
"params": [
{
"name": "feedID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "c0ec9a45-263e-4627-a13b-b5df901a6456",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Create Feed ",
"url": "/v1/feeds",
"method": "POST",
"sortNum": 90000,
"created": "2023-07-31T01:20:12.281Z",
"modified": "2023-07-31T01:31:31.415Z",
"headers": [],
"params": [],
"body": {
"type": "json",
"raw": "{\n \"feed_url\": \"https://miniflux.app/feed.xml\",\n \"category_id\": 1\n}",
"form": []
},
"tests": []
},
{
"_id": "f4c078a2-c031-4753-a7a4-4987439a61d0",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Update Feed",
"url": "/v1/feeds/{feedID}",
"method": "PUT",
"sortNum": 100000,
"created": "2023-07-31T01:20:12.282Z",
"modified": "2023-07-31T01:31:48.115Z",
"headers": [],
"params": [
{
"name": "feedID",
"value": "1",
"isPath": true
}
],
"body": {
"type": "json",
"raw": "{\n \"title\": \"Updated - New Feed Title\",\n \"category_id\": 1\n}",
"form": []
},
"tests": []
},
{
"_id": "1e47aeab-09ce-439b-907f-f9347b98b160",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Refresh Feed",
"url": "/v1/feeds/{feedID}/refresh",
"method": "PUT",
"sortNum": 110000,
"created": "2023-07-31T01:20:12.283Z",
"modified": "2023-07-31T01:31:58.778Z",
"headers": [],
"params": [
{
"name": "feedID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "4f643fa6-042d-4e95-8194-4cb0af7102bf",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Refresh All Feeds",
"url": "/v1/feeds/refresh",
"method": "PUT",
"sortNum": 115000,
"created": "2023-07-31T01:20:12.312Z",
"modified": "2023-07-31T01:20:12.312Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "d829f651-e9b9-41f9-aa9e-bd830d5e6389",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Remove Feed",
"url": "/v1/feeds/{feedID}",
"method": "DELETE",
"sortNum": 120000,
"created": "2023-07-31T01:20:12.284Z",
"modified": "2023-07-31T01:32:16.723Z",
"headers": [],
"params": [
{
"name": "feedID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "deafbf1a-d9e0-420f-a749-1bdde56772cb",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Feed Entries",
"url": "/v1/feeds/{feedID}/entries",
"method": "GET",
"sortNum": 130000,
"created": "2023-07-31T01:20:12.285Z",
"modified": "2023-07-31T01:32:52.812Z",
"headers": [],
"params": [
{
"name": "feedID",
"value": "2",
"isPath": true
}
],
"tests": []
},
{
"_id": "0052e903-75fc-48ec-8fd5-6e8784ed401a",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Entry",
"url": "/v1/entries/{entryID}",
"method": "GET",
"sortNum": 140000,
"created": "2023-07-31T01:20:12.286Z",
"modified": "2023-07-31T01:33:30.417Z",
"headers": [],
"params": [
{
"name": "entryID",
"value": "19",
"isPath": true
}
],
"tests": []
},
{
"_id": "1a055ace-2629-4298-9ea0-1bd17d59a4d6",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Fetch original article",
"url": "/v1/entries/{entryID}/fetch-content",
"method": "GET",
"sortNum": 150000,
"created": "2023-07-31T01:20:12.287Z",
"modified": "2023-07-31T01:33:41.014Z",
"headers": [],
"params": [
{
"name": "entryID",
"value": "19",
"isPath": true
}
],
"tests": []
},
{
"_id": "f272d1e6-ebbb-4c58-a159-4412ad657136",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Category Entries",
"url": "/v1/categories/{categoryID}/entries",
"method": "GET",
"sortNum": 160000,
"created": "2023-07-31T01:20:12.288Z",
"modified": "2023-07-31T01:20:12.288Z",
"headers": [],
"params": [
{
"name": "categoryID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "856ed091-318a-4a76-b7ce-6475106dd6b5",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Mark All Feed Entries as Read",
"url": "/v1/feeds/{feedID}/mark-all-as-read",
"method": "PUT",
"sortNum": 180000,
"created": "2023-07-31T01:20:12.290Z",
"modified": "2023-07-31T01:46:57.443Z",
"headers": [],
"params": [
{
"name": "feedID",
"value": "2",
"isPath": true
}
],
"tests": []
},
{
"_id": "67749962-d646-45d5-8b78-a8eeaa7cb971",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Entries",
"url": "/v1/entries",
"method": "GET",
"sortNum": 190000,
"created": "2023-07-31T01:20:12.291Z",
"modified": "2023-07-31T01:20:12.291Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "b55ae165-2abe-41f0-8b8a-14d826238d20",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Change Entries Status",
"url": "/v1/entries",
"method": "PUT",
"sortNum": 200000,
"created": "2023-07-31T01:20:12.292Z",
"modified": "2023-07-31T01:46:46.133Z",
"headers": [],
"params": [],
"body": {
"type": "json",
"raw": "{\n \"entry_ids\": [19, 20],\n \"status\": \"read\"\n}",
"form": []
},
"tests": []
},
{
"_id": "710dfc55-fc4e-48ab-989e-3ed78019d6c3",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Toggle Entry Bookmark",
"url": "/v1/entries/{entryID}/bookmark",
"method": "PUT",
"sortNum": 210000,
"created": "2023-07-31T01:20:12.293Z",
"modified": "2023-07-31T01:45:51.933Z",
"headers": [],
"params": [
{
"name": "entryID",
"value": "19",
"isPath": true
}
],
"tests": []
},
{
"_id": "19edbe55-0a0a-4102-bde0-73ed6d8515f6",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Save Entry to Third-Party Service",
"url": "/v1/entries/{entryID}/save",
"method": "POST",
"sortNum": 215000,
"created": "2023-07-31T01:20:12.313Z",
"modified": "2023-07-31T01:20:12.313Z",
"headers": [],
"params": [
{
"name": "entryID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "13d2cf52-aa08-4f7f-a83d-ffcb1e1190cd",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Categories",
"url": "/v1/categories",
"method": "GET",
"sortNum": 220000,
"created": "2023-07-31T01:20:12.294Z",
"modified": "2023-07-31T01:20:12.294Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "1547dabe-2bcb-4e06-acaa-fb393d1027e2",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Create Category ",
"url": "/v1/categories",
"method": "POST",
"sortNum": 230000,
"created": "2023-07-31T01:20:12.295Z",
"modified": "2023-07-31T01:20:12.295Z",
"headers": [],
"params": [],
"body": {
"type": "json",
"raw": "{\n \"title\": \"My category\"\n}",
"form": []
},
"tests": []
},
{
"_id": "e8dac503-19dc-434d-832f-eac4364785d8",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Update Category",
"url": "/v1/categories/{categoryID}",
"method": "PUT",
"sortNum": 232500,
"created": "2023-07-31T01:20:12.296Z",
"modified": "2023-07-31T01:42:55.831Z",
"headers": [],
"params": [
{
"name": "categoryID",
"value": "3",
"isPath": true
}
],
"body": {
"type": "json",
"raw": "\n{\n \"title\": \"My new title\"\n}",
"form": []
},
"tests": []
},
{
"_id": "86d74247-7f12-4a6e-91b3-fad9e7b6b1fb",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Delete Category",
"url": "/v1/categories/{categoryID}",
"method": "DELETE",
"sortNum": 235000,
"created": "2023-07-31T01:20:12.298Z",
"modified": "2023-07-31T01:44:21.486Z",
"headers": [],
"params": [
{
"name": "categoryID",
"value": "3",
"isPath": true
}
],
"tests": []
},
{
"_id": "668dde80-ed03-4fa6-ad2a-9cacd0ec31eb",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Mark Category Entries as Read",
"url": "/v1/categories/{categoryID}/mark-all-as-read",
"method": "PUT",
"sortNum": 237500,
"created": "2023-07-31T01:20:12.299Z",
"modified": "2023-07-31T01:43:50.637Z",
"headers": [],
"params": [
{
"name": "categoryID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "39ada469-765e-4584-ab00-9d263bd526a1",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Category Feeds",
"url": "/v1/categories/{categoryID}/feeds",
"method": "GET",
"sortNum": 243750,
"created": "2023-07-31T01:50:23.959Z",
"modified": "2023-07-31T01:50:51.443Z",
"headers": [],
"params": [
{
"name": "categoryID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "ec389c41-185f-4b57-a373-c6ff952b4282",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Refresh Category Feeds",
"url": "/v1/categories/{categoryID}/refresh",
"method": "PUT",
"sortNum": 250000,
"created": "2023-07-31T01:20:12.297Z",
"modified": "2023-07-31T01:43:23.102Z",
"headers": [],
"params": [
{
"name": "categoryID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "bc4a7578-c95e-4436-bbfa-61ccc4a8fc71",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Category Entries",
"url": "/v1/categories/{categoryID}/entries",
"method": "GET",
"sortNum": 257500,
"created": "2023-07-31T01:51:15.403Z",
"modified": "2023-07-31T01:51:35.106Z",
"headers": [],
"params": [
{
"name": "categoryID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "fa935fb3-3ed6-4ee3-b995-6c054766d109",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Category Entry",
"url": "/v1/categories/{categoryID}/entries/{entryID}",
"method": "GET",
"sortNum": 258750,
"created": "2023-07-31T01:51:46.699Z",
"modified": "2023-07-31T01:52:12.155Z",
"headers": [],
"params": [
{
"name": "categoryID",
"value": "1",
"isPath": true
},
{
"name": "entryID",
"value": "19",
"isPath": true
}
],
"tests": []
},
{
"_id": "cb6968e9-8d13-4410-9ad5-85847b73d7eb",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "OPML Export",
"url": "/v1/export",
"method": "GET",
"sortNum": 280000,
"created": "2023-07-31T01:20:12.300Z",
"modified": "2023-07-31T01:20:12.300Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "169a64e1-08dd-4760-b405-a748a5286b38",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "OPML Import",
"url": "/v1/import",
"method": "POST",
"sortNum": 290000,
"created": "2023-07-31T01:20:12.301Z",
"modified": "2023-07-31T01:41:31.218Z",
"headers": [],
"params": [],
"body": {
"type": "xml",
"raw": "\n\n \n Miniflux\n Sun, 30 Jul 2023 18:41:08 PDT\n \n \n \n \n \n \n",
"form": []
},
"tests": []
},
{
"_id": "bfb7264a-7b46-49fe-b451-fb6d9b03f0b2",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Create User",
"url": "/v1/users",
"method": "POST",
"sortNum": 300000,
"created": "2023-07-31T01:20:12.302Z",
"modified": "2023-07-31T01:20:12.302Z",
"headers": [],
"params": [],
"body": {
"type": "json",
"raw": "{\n \"username\": \"bob\",\n \"password\": \"test123\",\n \"is_admin\": false\n}",
"form": []
},
"tests": []
},
{
"_id": "93c1dcc2-bf09-4e8e-86ba-0c042147a48f",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Update User",
"url": "/v1/users/{userID}",
"method": "PUT",
"sortNum": 310000,
"created": "2023-07-31T01:20:12.303Z",
"modified": "2023-07-31T01:40:09.576Z",
"headers": [],
"params": [
{
"name": "userID",
"value": "2",
"isPath": true
}
],
"body": {
"type": "json",
"raw": "{\n \"username\": \"joe\"\n}",
"form": []
},
"tests": []
},
{
"_id": "19cf34c1-eb0a-4442-a682-2e94c4f5e594",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Current User",
"url": "/v1/me",
"method": "GET",
"sortNum": 320000,
"created": "2023-07-31T01:20:12.304Z",
"modified": "2023-07-31T01:20:12.304Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "4a700f7c-8762-4cab-aab1-2d8066884d69",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get User by ID",
"url": "/v1/users/{userID}",
"method": "GET",
"sortNum": 330000,
"created": "2023-07-31T01:20:12.305Z",
"modified": "2023-07-31T01:39:38.472Z",
"headers": [],
"params": [
{
"name": "userID",
"value": "1",
"isPath": true
}
],
"tests": []
},
{
"_id": "66cb0985-5ed4-4b1e-9029-8605b7f5f74e",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get User by username",
"url": "/v1/users/{username}",
"method": "GET",
"sortNum": 335000,
"created": "2023-07-31T01:47:53.649Z",
"modified": "2023-07-31T01:48:10.655Z",
"headers": [],
"params": [
{
"name": "username",
"value": "admin",
"isPath": true
}
],
"tests": []
},
{
"_id": "3d4b227a-83a2-4d87-a0ed-ce9d5497aea6",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Get Users",
"url": "/v1/users",
"method": "GET",
"sortNum": 340000,
"created": "2023-07-31T01:20:12.306Z",
"modified": "2023-07-31T01:20:12.306Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "90138dea-799a-4b44-ad68-fce6ec5898a6",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Delete User",
"url": "/v1/users/{userID}",
"method": "DELETE",
"sortNum": 350000,
"created": "2023-07-31T01:20:12.307Z",
"modified": "2023-07-31T01:40:38.124Z",
"headers": [],
"params": [
{
"name": "userID",
"value": "2",
"isPath": true
}
],
"tests": []
},
{
"_id": "4b3bf7ca-bc55-423b-a3ee-6279c10a0d85",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Fetch Read/Unread Counters",
"url": "/v1/feeds/counters",
"method": "GET",
"sortNum": 370000,
"created": "2023-07-31T01:20:12.309Z",
"modified": "2023-07-31T01:20:12.309Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "7721682f-31e3-4d71-8df9-02e30e4729d7",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Healthcheck",
"url": "/healthcheck",
"method": "GET",
"sortNum": 380000,
"created": "2023-07-31T01:20:12.310Z",
"modified": "2023-07-31T01:20:12.310Z",
"headers": [],
"params": [],
"tests": []
},
{
"_id": "64410254-b17a-43e4-984d-10b9b13c5818",
"colId": "fc35618a-f39f-40a0-a443-d4ae568baa8e",
"containerId": "",
"name": "Version",
"url": "/version",
"method": "GET",
"sortNum": 390000,
"created": "2023-07-31T01:20:12.311Z",
"modified": "2023-07-31T01:20:12.311Z",
"headers": [],
"params": [],
"tests": []
}
],
"settings": {
"auth": {
"type": "basic",
"basic": {
"username": "admin",
"password": "test123"
}
},
"options": {
"baseUrl": "http://localhost:8080"
}
}
} v2-2.2.6/go.mod 0000664 0000000 0000000 00000003027 14756465373 0013204 0 ustar 00root root 0000000 0000000 module miniflux.app/v2
// +heroku goVersion go1.23
require (
github.com/PuerkitoBio/goquery v1.10.2
github.com/andybalholm/brotli v1.1.1
github.com/coreos/go-oidc/v3 v3.12.0
github.com/go-webauthn/webauthn v0.11.2
github.com/gorilla/mux v1.8.1
github.com/lib/pq v1.10.9
github.com/mattn/go-sqlite3 v1.14.24
github.com/prometheus/client_golang v1.20.5
github.com/tdewolff/minify/v2 v2.21.3
golang.org/x/crypto v0.33.0
golang.org/x/image v0.24.0
golang.org/x/net v0.35.0
golang.org/x/oauth2 v0.26.0
golang.org/x/term v0.29.0
)
require (
github.com/go-webauthn/x v0.1.14 // indirect
github.com/golang-jwt/jwt/v5 v5.2.1 // indirect
github.com/google/go-tpm v0.9.1 // indirect
)
require (
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/fxamacker/cbor/v2 v2.7.0 // indirect
github.com/go-jose/go-jose/v4 v4.0.2 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/klauspost/compress v1.17.9 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/prometheus/client_model v0.6.1 // indirect
github.com/prometheus/common v0.55.0 // indirect
github.com/prometheus/procfs v0.15.1 // indirect
github.com/tdewolff/parse/v2 v2.7.19 // indirect
github.com/x448/float16 v0.8.4 // indirect
golang.org/x/sys v0.30.0 // indirect
golang.org/x/text v0.22.0 // indirect
google.golang.org/protobuf v1.34.2 // indirect
)
go 1.23
v2-2.2.6/go.sum 0000664 0000000 0000000 00000031525 14756465373 0013235 0 ustar 00root root 0000000 0000000 github.com/PuerkitoBio/goquery v1.10.2 h1:7fh2BdHcG6VFZsK7toXBT/Bh1z5Wmy8Q9MV9HqT2AM8=
github.com/PuerkitoBio/goquery v1.10.2/go.mod h1:0guWGjcLu9AYC7C1GHnpysHy056u9aEkUHwhdnePMCU=
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/coreos/go-oidc/v3 v3.12.0 h1:sJk+8G2qq94rDI6ehZ71Bol3oUHy63qNYmkiSjrc/Jo=
github.com/coreos/go-oidc/v3 v3.12.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E=
github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
github.com/go-jose/go-jose/v4 v4.0.2 h1:R3l3kkBds16bO7ZFAEEcofK0MkrAJt3jlJznWZG0nvk=
github.com/go-jose/go-jose/v4 v4.0.2/go.mod h1:WVf9LFMHh/QVrmqrOfqun0C45tMe3RoiKJMPvgWwLfY=
github.com/go-webauthn/webauthn v0.11.2 h1:Fgx0/wlmkClTKlnOsdOQ+K5HcHDsDcYIvtYmfhEOSUc=
github.com/go-webauthn/webauthn v0.11.2/go.mod h1:aOtudaF94pM71g3jRwTYYwQTG1KyTILTcZqN1srkmD0=
github.com/go-webauthn/x v0.1.14 h1:1wrB8jzXAofojJPAaRxnZhRgagvLGnLjhCAwg3kTpT0=
github.com/go-webauthn/x v0.1.14/go.mod h1:UuVvFZ8/NbOnkDz3y1NaxtUN87pmtpC1PQ+/5BBQRdc=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-tpm v0.9.1 h1:0pGc4X//bAlmZzMKf8iz6IsDo1nYTbYJ6FZN/rg4zdM=
github.com/google/go-tpm v0.9.1/go.mod h1:h9jEsEECg7gtLis0upRBQU+GhYVH6jMjrFxI8u6bVUY=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y=
github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc=
github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8=
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tdewolff/minify/v2 v2.21.3 h1:KmhKNGrN/dGcvb2WDdB5yA49bo37s+hcD8RiF+lioV8=
github.com/tdewolff/minify/v2 v2.21.3/go.mod h1:iGxHaGiONAnsYuo8CRyf8iPUcqRJVB/RhtEcTpqS7xw=
github.com/tdewolff/parse/v2 v2.7.19 h1:7Ljh26yj+gdLFEq/7q9LT4SYyKtwQX4ocNrj45UCePg=
github.com/tdewolff/parse/v2 v2.7.19/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA=
github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE=
github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739 h1:IkjBCtQOOjIn03u/dMQK9g+Iw9ewps4mCl1nB8Sscbo=
github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.33.0 h1:IOBPskki6Lysi0lo9qQvbxiQ+FvsCC/YWOecCHAixus=
golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M=
golang.org/x/image v0.24.0 h1:AN7zRgVsbvmTfNyqIbbOraYL8mSwcKncEj8ofjgzcMQ=
golang.org/x/image v0.24.0/go.mod h1:4b/ITuLfqYq1hqZcjofwctIhi7sZh2WaCjvsBNjjya8=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
v2-2.2.6/internal/ 0000775 0000000 0000000 00000000000 14756465373 0013710 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/api/ 0000775 0000000 0000000 00000000000 14756465373 0014461 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/api/api.go 0000664 0000000 0000000 00000011663 14756465373 0015570 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
"net/http"
"runtime"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/version"
"miniflux.app/v2/internal/worker"
"github.com/gorilla/mux"
)
type handler struct {
store *storage.Storage
pool *worker.Pool
router *mux.Router
}
// Serve declares API routes for the application.
func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool) {
handler := &handler{store, pool, router}
sr := router.PathPrefix("/v1").Subrouter()
middleware := newMiddleware(store)
sr.Use(middleware.handleCORS)
sr.Use(middleware.apiKeyAuth)
sr.Use(middleware.basicAuth)
sr.Methods(http.MethodOptions)
sr.HandleFunc("/users", handler.createUser).Methods(http.MethodPost)
sr.HandleFunc("/users", handler.users).Methods(http.MethodGet)
sr.HandleFunc("/users/{userID:[0-9]+}", handler.userByID).Methods(http.MethodGet)
sr.HandleFunc("/users/{userID:[0-9]+}", handler.updateUser).Methods(http.MethodPut)
sr.HandleFunc("/users/{userID:[0-9]+}", handler.removeUser).Methods(http.MethodDelete)
sr.HandleFunc("/users/{userID:[0-9]+}/mark-all-as-read", handler.markUserAsRead).Methods(http.MethodPut)
sr.HandleFunc("/users/{username}", handler.userByUsername).Methods(http.MethodGet)
sr.HandleFunc("/me", handler.currentUser).Methods(http.MethodGet)
sr.HandleFunc("/categories", handler.createCategory).Methods(http.MethodPost)
sr.HandleFunc("/categories", handler.getCategories).Methods(http.MethodGet)
sr.HandleFunc("/categories/{categoryID}", handler.updateCategory).Methods(http.MethodPut)
sr.HandleFunc("/categories/{categoryID}", handler.removeCategory).Methods(http.MethodDelete)
sr.HandleFunc("/categories/{categoryID}/mark-all-as-read", handler.markCategoryAsRead).Methods(http.MethodPut)
sr.HandleFunc("/categories/{categoryID}/feeds", handler.getCategoryFeeds).Methods(http.MethodGet)
sr.HandleFunc("/categories/{categoryID}/refresh", handler.refreshCategory).Methods(http.MethodPut)
sr.HandleFunc("/categories/{categoryID}/entries", handler.getCategoryEntries).Methods(http.MethodGet)
sr.HandleFunc("/categories/{categoryID}/entries/{entryID}", handler.getCategoryEntry).Methods(http.MethodGet)
sr.HandleFunc("/discover", handler.discoverSubscriptions).Methods(http.MethodPost)
sr.HandleFunc("/feeds", handler.createFeed).Methods(http.MethodPost)
sr.HandleFunc("/feeds", handler.getFeeds).Methods(http.MethodGet)
sr.HandleFunc("/feeds/counters", handler.fetchCounters).Methods(http.MethodGet)
sr.HandleFunc("/feeds/refresh", handler.refreshAllFeeds).Methods(http.MethodPut)
sr.HandleFunc("/feeds/{feedID}/refresh", handler.refreshFeed).Methods(http.MethodPut)
sr.HandleFunc("/feeds/{feedID}", handler.getFeed).Methods(http.MethodGet)
sr.HandleFunc("/feeds/{feedID}", handler.updateFeed).Methods(http.MethodPut)
sr.HandleFunc("/feeds/{feedID}", handler.removeFeed).Methods(http.MethodDelete)
sr.HandleFunc("/feeds/{feedID}/icon", handler.getIconByFeedID).Methods(http.MethodGet)
sr.HandleFunc("/feeds/{feedID}/mark-all-as-read", handler.markFeedAsRead).Methods(http.MethodPut)
sr.HandleFunc("/export", handler.exportFeeds).Methods(http.MethodGet)
sr.HandleFunc("/import", handler.importFeeds).Methods(http.MethodPost)
sr.HandleFunc("/feeds/{feedID}/entries", handler.getFeedEntries).Methods(http.MethodGet)
sr.HandleFunc("/feeds/{feedID}/entries/{entryID}", handler.getFeedEntry).Methods(http.MethodGet)
sr.HandleFunc("/entries", handler.getEntries).Methods(http.MethodGet)
sr.HandleFunc("/entries", handler.setEntryStatus).Methods(http.MethodPut)
sr.HandleFunc("/entries/{entryID}", handler.getEntry).Methods(http.MethodGet)
sr.HandleFunc("/entries/{entryID}", handler.updateEntry).Methods(http.MethodPut)
sr.HandleFunc("/entries/{entryID}/bookmark", handler.toggleBookmark).Methods(http.MethodPut)
sr.HandleFunc("/entries/{entryID}/save", handler.saveEntry).Methods(http.MethodPost)
sr.HandleFunc("/entries/{entryID}/fetch-content", handler.fetchContent).Methods(http.MethodGet)
sr.HandleFunc("/flush-history", handler.flushHistory).Methods(http.MethodPut, http.MethodDelete)
sr.HandleFunc("/icons/{iconID}", handler.getIconByIconID).Methods(http.MethodGet)
sr.HandleFunc("/enclosures/{enclosureID}", handler.getEnclosureByID).Methods(http.MethodGet)
sr.HandleFunc("/enclosures/{enclosureID}", handler.updateEnclosureByID).Methods(http.MethodPut)
sr.HandleFunc("/integrations/status", handler.getIntegrationsStatus).Methods(http.MethodGet)
sr.HandleFunc("/version", handler.versionHandler).Methods(http.MethodGet)
}
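// Illustrative sketch, not part of the upstream file: every route above is
// mounted under the /v1 prefix and passes through the CORS, API key, and
// Basic Auth middleware registered at the top of Serve. Assuming a local
// instance on http://localhost:8080 with the demo "admin"/"test123" account
// from the Thunder Client collection above, a plain net/http call could look
// like this:
//
//	req, _ := http.NewRequest(http.MethodGet, "http://localhost:8080/v1/me", nil)
//	req.SetBasicAuth("admin", "test123")
//	resp, err := http.DefaultClient.Do(req)
//	// or, instead of Basic Auth, send an API key:
//	// req.Header.Set("X-Auth-Token", "<api-key>")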
func (h *handler) versionHandler(w http.ResponseWriter, r *http.Request) {
json.OK(w, r, &versionResponse{
Version: version.Version,
Commit: version.Commit,
BuildDate: version.BuildDate,
GoVersion: runtime.Version(),
Compiler: runtime.Compiler,
Arch: runtime.GOARCH,
OS: runtime.GOOS,
})
}
v2-2.2.6/internal/api/api_integration_test.go 0000664 0000000 0000000 00000231667 14756465373 0021242 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
"bytes"
"errors"
"fmt"
"io"
"math/rand"
"os"
"strings"
"testing"
miniflux "miniflux.app/v2/client"
)
const skipIntegrationTestsMessage = `Set TEST_MINIFLUX_* environment variables to run the API integration tests`
type integrationTestConfig struct {
testBaseURL string
testAdminUsername string
testAdminPassword string
testRegularUsername string
testRegularPassword string
testFeedURL string
testFeedTitle string
testSubscriptionTitle string
testWebsiteURL string
}
func newIntegrationTestConfig() *integrationTestConfig {
getDefaultEnvValues := func(key, defaultValue string) string {
value := os.Getenv(key)
if value == "" {
return defaultValue
}
return value
}
return &integrationTestConfig{
testBaseURL: getDefaultEnvValues("TEST_MINIFLUX_BASE_URL", ""),
testAdminUsername: getDefaultEnvValues("TEST_MINIFLUX_ADMIN_USERNAME", ""),
testAdminPassword: getDefaultEnvValues("TEST_MINIFLUX_ADMIN_PASSWORD", ""),
testRegularUsername: getDefaultEnvValues("TEST_MINIFLUX_REGULAR_USERNAME_PREFIX", "regular_test_user"),
testRegularPassword: getDefaultEnvValues("TEST_MINIFLUX_REGULAR_PASSWORD", "regular_test_user_password"),
testFeedURL: getDefaultEnvValues("TEST_MINIFLUX_FEED_URL", "https://miniflux.app/feed.xml"),
testFeedTitle: getDefaultEnvValues("TEST_MINIFLUX_FEED_TITLE", "Miniflux"),
testSubscriptionTitle: getDefaultEnvValues("TEST_MINIFLUX_SUBSCRIPTION_TITLE", "Miniflux Releases"),
testWebsiteURL: getDefaultEnvValues("TEST_MINIFLUX_WEBSITE_URL", "https://miniflux.app"),
}
}
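// Illustrative note, based on the defaults above: the integration tests are
// skipped unless at least TEST_MINIFLUX_BASE_URL, TEST_MINIFLUX_ADMIN_USERNAME,
// and TEST_MINIFLUX_ADMIN_PASSWORD are set (the remaining TEST_MINIFLUX_*
// variables fall back to non-empty defaults). For example, against a local
// instance using the demo credentials from the Thunder Client collection:
//
//	TEST_MINIFLUX_BASE_URL=http://localhost:8080 \
//	TEST_MINIFLUX_ADMIN_USERNAME=admin \
//	TEST_MINIFLUX_ADMIN_PASSWORD=test123 \
//	go test ./internal/api/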
func (c *integrationTestConfig) isConfigured() bool {
return c.testBaseURL != "" && c.testAdminUsername != "" && c.testAdminPassword != "" && c.testFeedURL != "" && c.testFeedTitle != "" && c.testSubscriptionTitle != "" && c.testWebsiteURL != ""
}
func (c *integrationTestConfig) genRandomUsername() string {
return fmt.Sprintf("%s_%10d", c.testRegularUsername, rand.Int())
}
func TestIncorrectEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient("incorrect url")
if _, err := client.Users(); err == nil {
t.Fatal(`Using an incorrect URL should raise an error`)
}
client = miniflux.NewClient("")
if _, err := client.Users(); err == nil {
t.Fatal(`Using an empty URL should raise an error`)
}
}
func TestHealthcheckEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL)
if err := client.Healthcheck(); err != nil {
t.Fatal(err)
}
}
func TestVersionEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
version, err := client.Version()
if err != nil {
t.Fatal(err)
}
if version.Version == "" {
t.Fatal(`Version should not be empty`)
}
if version.Commit == "" {
t.Fatal(`Commit should not be empty`)
}
if version.BuildDate == "" {
t.Fatal(`Build date should not be empty`)
}
if version.GoVersion == "" {
t.Fatal(`Go version should not be empty`)
}
if version.Compiler == "" {
t.Fatal(`Compiler should not be empty`)
}
if version.Arch == "" {
t.Fatal(`Arch should not be empty`)
}
if version.OS == "" {
t.Fatal(`OS should not be empty`)
}
}
func TestInvalidCredentials(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, "invalid", "invalid")
_, err := client.Users()
if err == nil {
t.Fatal(`Using bad credentials should raise an error`)
}
if err != miniflux.ErrNotAuthorized {
t.Fatal(`A "Not Authorized" error should be raised`)
}
}
func TestGetMeEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
user, err := client.Me()
if err != nil {
t.Fatal(err)
}
if user.Username != testConfig.testAdminUsername {
t.Fatalf(`Invalid username, got %q instead of %q`, user.Username, testConfig.testAdminUsername)
}
}
func TestGetUsersEndpointAsAdmin(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
users, err := client.Users()
if err != nil {
t.Fatal(err)
}
if len(users) == 0 {
t.Fatal(`Users should not be empty`)
}
if users[0].ID == 0 {
t.Fatalf(`Invalid userID, got "%v"`, users[0].ID)
}
if users[0].Username != testConfig.testAdminUsername {
t.Fatalf(`Invalid username, got "%v" instead of "%v"`, users[0].Username, testConfig.testAdminUsername)
}
if users[0].Password != "" {
t.Fatalf(`Invalid password, got "%v"`, users[0].Password)
}
if users[0].Language != "en_US" {
t.Fatalf(`Invalid language, got "%v"`, users[0].Language)
}
if users[0].Theme != "light_serif" {
t.Fatalf(`Invalid theme, got "%v"`, users[0].Theme)
}
if users[0].Timezone != "UTC" {
t.Fatalf(`Invalid timezone, got "%v"`, users[0].Timezone)
}
if !users[0].IsAdmin {
t.Fatalf(`Invalid role, got "%v"`, users[0].IsAdmin)
}
if users[0].EntriesPerPage != 100 {
t.Fatalf(`Invalid entries per page, got "%v"`, users[0].EntriesPerPage)
}
if users[0].DisplayMode != "standalone" {
t.Fatalf(`Invalid web app display mode, got "%v"`, users[0].DisplayMode)
}
if users[0].GestureNav != "tap" {
t.Fatalf(`Invalid gesture navigation, got "%v"`, users[0].GestureNav)
}
if users[0].DefaultReadingSpeed != 265 {
t.Fatalf(`Invalid default reading speed, got "%v"`, users[0].DefaultReadingSpeed)
}
if users[0].CJKReadingSpeed != 500 {
t.Fatalf(`Invalid cjk reading speed, got "%v"`, users[0].CJKReadingSpeed)
}
}
func TestGetUsersEndpointAsRegularUser(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
_, err = regularUserClient.Users()
if err == nil {
t.Fatal(`Regular users should not have access to the users endpoint`)
}
}
func TestCreateUserEndpointAsAdmin(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
username := testConfig.genRandomUsername()
regularTestUser, err := client.CreateUser(username, testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer client.DeleteUser(regularTestUser.ID)
if regularTestUser.Username != username {
t.Fatalf(`Invalid username, got "%v" instead of "%v"`, regularTestUser.Username, username)
}
if regularTestUser.Password != "" {
t.Fatalf(`Invalid password, got "%v"`, regularTestUser.Password)
}
if regularTestUser.Language != "en_US" {
t.Fatalf(`Invalid language, got "%v"`, regularTestUser.Language)
}
if regularTestUser.Theme != "light_serif" {
t.Fatalf(`Invalid theme, got "%v"`, regularTestUser.Theme)
}
if regularTestUser.Timezone != "UTC" {
t.Fatalf(`Invalid timezone, got "%v"`, regularTestUser.Timezone)
}
if regularTestUser.IsAdmin {
t.Fatalf(`Invalid role, got "%v"`, regularTestUser.IsAdmin)
}
if regularTestUser.EntriesPerPage != 100 {
t.Fatalf(`Invalid entries per page, got "%v"`, regularTestUser.EntriesPerPage)
}
if regularTestUser.DisplayMode != "standalone" {
t.Fatalf(`Invalid web app display mode, got "%v"`, regularTestUser.DisplayMode)
}
if regularTestUser.GestureNav != "tap" {
t.Fatalf(`Invalid gesture navigation, got "%v"`, regularTestUser.GestureNav)
}
if regularTestUser.DefaultReadingSpeed != 265 {
t.Fatalf(`Invalid default reading speed, got "%v"`, regularTestUser.DefaultReadingSpeed)
}
if regularTestUser.CJKReadingSpeed != 500 {
t.Fatalf(`Invalid cjk reading speed, got "%v"`, regularTestUser.CJKReadingSpeed)
}
}
func TestCreateUserEndpointAsRegularUser(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
_, err = regularUserClient.CreateUser(regularTestUser.Username, testConfig.testRegularPassword, false)
if err == nil {
t.Fatal(`Regular users should not have access to the create user endpoint`)
}
}
func TestCannotCreateDuplicateUser(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
_, err := client.CreateUser(testConfig.testAdminUsername, testConfig.testAdminPassword, true)
if err == nil {
t.Fatal(`Duplicated users should not be allowed`)
}
}
func TestRemoveUserEndpointAsAdmin(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
user, err := client.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
if err := client.DeleteUser(user.ID); err != nil {
t.Fatal(err)
}
}
func TestRemoveUserEndpointAsRegularUser(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
err = regularUserClient.DeleteUser(regularTestUser.ID)
if err == nil {
t.Fatal(`Regular users should not have access to the remove user endpoint`)
}
}
func TestGetUserByIDEndpointAsAdmin(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
user, err := client.Me()
if err != nil {
t.Fatal(err)
}
userByID, err := client.UserByID(user.ID)
if err != nil {
t.Fatal(err)
}
if userByID.ID != user.ID {
t.Errorf(`Invalid userID, got "%v" instead of "%v"`, userByID.ID, user.ID)
}
if userByID.Username != user.Username {
t.Errorf(`Invalid username, got "%v" instead of "%v"`, userByID.Username, user.Username)
}
if userByID.Password != "" {
t.Errorf(`The password field must be empty, got "%v"`, userByID.Password)
}
if userByID.Language != user.Language {
t.Errorf(`Invalid language, got "%v"`, userByID.Language)
}
if userByID.Theme != user.Theme {
t.Errorf(`Invalid theme, got "%v"`, userByID.Theme)
}
if userByID.Timezone != user.Timezone {
t.Errorf(`Invalid timezone, got "%v"`, userByID.Timezone)
}
if userByID.IsAdmin != user.IsAdmin {
t.Errorf(`Invalid role, got "%v"`, userByID.IsAdmin)
}
if userByID.EntriesPerPage != user.EntriesPerPage {
t.Errorf(`Invalid entries per page, got "%v"`, userByID.EntriesPerPage)
}
if userByID.DisplayMode != user.DisplayMode {
t.Errorf(`Invalid web app display mode, got "%v"`, userByID.DisplayMode)
}
if userByID.GestureNav != user.GestureNav {
t.Errorf(`Invalid gesture navigation, got "%v"`, userByID.GestureNav)
}
if userByID.DefaultReadingSpeed != user.DefaultReadingSpeed {
t.Errorf(`Invalid default reading speed, got "%v"`, userByID.DefaultReadingSpeed)
}
if userByID.CJKReadingSpeed != user.CJKReadingSpeed {
t.Errorf(`Invalid cjk reading speed, got "%v"`, userByID.CJKReadingSpeed)
}
if userByID.EntryDirection != user.EntryDirection {
t.Errorf(`Invalid entry direction, got "%v"`, userByID.EntryDirection)
}
if userByID.EntryOrder != user.EntryOrder {
t.Errorf(`Invalid entry order, got "%v"`, userByID.EntryOrder)
}
}
func TestGetUserByIDEndpointAsRegularUser(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
_, err = regularUserClient.UserByID(regularTestUser.ID)
if err == nil {
t.Fatal(`Regular users should not have access to the user by ID endpoint`)
}
}
func TestGetUserByUsernameEndpointAsAdmin(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
user, err := client.Me()
if err != nil {
t.Fatal(err)
}
userByUsername, err := client.UserByUsername(user.Username)
if err != nil {
t.Fatal(err)
}
if userByUsername.ID != user.ID {
t.Errorf(`Invalid userID, got "%v" instead of "%v"`, userByUsername.ID, user.ID)
}
if userByUsername.Username != user.Username {
t.Errorf(`Invalid username, got "%v" instead of "%v"`, userByUsername.Username, user.Username)
}
if userByUsername.Password != "" {
t.Errorf(`The password field must be empty, got "%v"`, userByUsername.Password)
}
if userByUsername.Language != user.Language {
t.Errorf(`Invalid language, got "%v"`, userByUsername.Language)
}
if userByUsername.Theme != user.Theme {
t.Errorf(`Invalid theme, got "%v"`, userByUsername.Theme)
}
if userByUsername.Timezone != user.Timezone {
t.Errorf(`Invalid timezone, got "%v"`, userByUsername.Timezone)
}
if userByUsername.IsAdmin != user.IsAdmin {
t.Errorf(`Invalid role, got "%v"`, userByUsername.IsAdmin)
}
if userByUsername.EntriesPerPage != user.EntriesPerPage {
t.Errorf(`Invalid entries per page, got "%v"`, userByUsername.EntriesPerPage)
}
if userByUsername.DisplayMode != user.DisplayMode {
t.Errorf(`Invalid web app display mode, got "%v"`, userByUsername.DisplayMode)
}
if userByUsername.GestureNav != user.GestureNav {
t.Errorf(`Invalid gesture navigation, got "%v"`, userByUsername.GestureNav)
}
if userByUsername.DefaultReadingSpeed != user.DefaultReadingSpeed {
t.Errorf(`Invalid default reading speed, got "%v"`, userByUsername.DefaultReadingSpeed)
}
if userByUsername.CJKReadingSpeed != user.CJKReadingSpeed {
t.Errorf(`Invalid cjk reading speed, got "%v"`, userByUsername.CJKReadingSpeed)
}
if userByUsername.EntryDirection != user.EntryDirection {
t.Errorf(`Invalid entry direction, got "%v"`, userByUsername.EntryDirection)
}
}
func TestGetUserByUsernameEndpointAsRegularUser(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
_, err = regularUserClient.UserByUsername(regularTestUser.Username)
if err == nil {
t.Fatal(`Regular users should not have access to the user by username endpoint`)
}
}
func TestUpdateUserEndpointByChangingDefaultTheme(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
userUpdateRequest := &miniflux.UserModificationRequest{
Theme: miniflux.SetOptionalField("dark_serif"),
}
updatedUser, err := regularUserClient.UpdateUser(regularTestUser.ID, userUpdateRequest)
if err != nil {
t.Fatal(err)
}
if updatedUser.Theme != "dark_serif" {
t.Fatalf(`Invalid theme, got "%v"`, updatedUser.Theme)
}
}
func TestUpdateUserEndpointByChangingExternalFonts(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
userUpdateRequest := &miniflux.UserModificationRequest{
ExternalFontHosts: miniflux.SetOptionalField(" fonts.example.org "),
}
updatedUser, err := regularUserClient.UpdateUser(regularTestUser.ID, userUpdateRequest)
if err != nil {
t.Fatal(err)
}
if updatedUser.ExternalFontHosts != "fonts.example.org" {
t.Fatalf(`Invalid external font hosts, got "%v"`, updatedUser.ExternalFontHosts)
}
}
func TestUpdateUserEndpointByChangingExternalFontsWithInvalidValue(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
userUpdateRequest := &miniflux.UserModificationRequest{
ExternalFontHosts: miniflux.SetOptionalField("'self' *"),
}
if _, err := regularUserClient.UpdateUser(regularTestUser.ID, userUpdateRequest); err == nil {
t.Fatal(`Updating the user with an invalid external font host should raise an error`)
}
}
func TestUpdateUserEndpointByChangingCustomJS(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
userUpdateRequest := &miniflux.UserModificationRequest{
CustomJS: miniflux.SetOptionalField("alert('Hello, World!');"),
}
updatedUser, err := regularUserClient.UpdateUser(regularTestUser.ID, userUpdateRequest)
if err != nil {
t.Fatal(err)
}
if updatedUser.CustomJS != "alert('Hello, World!');" {
t.Fatalf(`Invalid custom JS, got %q`, updatedUser.CustomJS)
}
}
func TestUpdateUserEndpointByChangingDefaultThemeToInvalidValue(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
userUpdateRequest := &miniflux.UserModificationRequest{
Theme: miniflux.SetOptionalField("invalid_theme"),
}
_, err = regularUserClient.UpdateUser(regularTestUser.ID, userUpdateRequest)
if err == nil {
t.Fatal(`Updating the user with an invalid theme should raise an error`)
}
}
func TestRegularUsersCannotUpdateOtherUsers(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
adminUser, err := adminClient.Me()
if err != nil {
t.Fatal(err)
}
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
userUpdateRequest := &miniflux.UserModificationRequest{
Theme: miniflux.SetOptionalField("dark_serif"),
}
_, err = regularUserClient.UpdateUser(adminUser.ID, userUpdateRequest)
if err == nil {
t.Fatal(`Regular users should not be able to update other users`)
}
}
func TestMarkUserAsReadEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
if err := regularUserClient.MarkAllAsRead(regularTestUser.ID); err != nil {
t.Fatal(err)
}
results, err := regularUserClient.FeedEntries(feedID, nil)
if err != nil {
t.Fatal(err)
}
for _, entry := range results.Entries {
if entry.Status != miniflux.EntryStatusRead {
t.Errorf(`Status for entry %d was %q instead of %q`, entry.ID, entry.Status, miniflux.EntryStatusRead)
}
}
}
func TestCannotMarkUserAsReadAsOtherUser(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
adminUser, err := adminClient.Me()
if err != nil {
t.Fatal(err)
}
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
if err := regularUserClient.MarkAllAsRead(adminUser.ID); err == nil {
t.Fatalf(`Non-admin users should not be able to mark another user as read`)
}
}
func TestCreateCategoryEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
categoryName := "My category"
category, err := regularUserClient.CreateCategory(categoryName)
if err != nil {
t.Fatal(err)
}
if category.ID == 0 {
t.Errorf(`Invalid categoryID, got "%v"`, category.ID)
}
if category.UserID <= 0 {
t.Errorf(`Invalid userID, got "%v"`, category.UserID)
}
if category.Title != categoryName {
t.Errorf(`Invalid title, got "%v" instead of "%v"`, category.Title, categoryName)
}
}
func TestCreateCategoryWithEmptyTitle(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
_, err := client.CreateCategory("")
if err == nil {
t.Fatalf(`Creating a category with an empty title should raise an error`)
}
}
func TestCannotCreateDuplicatedCategory(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
categoryName := "My category"
if _, err := regularUserClient.CreateCategory(categoryName); err != nil {
t.Fatal(err)
}
if _, err = regularUserClient.CreateCategory(categoryName); err == nil {
t.Fatalf(`Duplicated categories should not be allowed`)
}
}
func TestUpdateCategoryEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
categoryName := "My category"
category, err := regularUserClient.CreateCategory(categoryName)
if err != nil {
t.Fatal(err)
}
updatedCategory, err := regularUserClient.UpdateCategory(category.ID, "new title")
if err != nil {
t.Fatal(err)
}
if updatedCategory.ID != category.ID {
t.Errorf(`Invalid categoryID, got "%v"`, updatedCategory.ID)
}
if updatedCategory.UserID != regularTestUser.ID {
t.Errorf(`Invalid userID, got "%v"`, updatedCategory.UserID)
}
if updatedCategory.Title != "new title" {
t.Errorf(`Invalid title, got "%v" instead of "%v"`, updatedCategory.Title, "new title")
}
}
func TestUpdateInexistingCategory(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
_, err := client.UpdateCategory(123456789, "new title")
if err == nil {
t.Fatalf(`Updating an inexisting category should raise an error`)
}
}
func TestDeleteCategoryEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
categoryName := "My category"
category, err := regularUserClient.CreateCategory(categoryName)
if err != nil {
t.Fatal(err)
}
if err := regularUserClient.DeleteCategory(category.ID); err != nil {
t.Fatal(err)
}
}
func TestCannotDeleteInexistingCategory(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
err := client.DeleteCategory(123456789)
if err == nil {
t.Fatalf(`Deleting an inexisting category should raise an error`)
}
}
func TestCannotDeleteCategoryOfAnotherUser(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
category, err := regularUserClient.CreateCategory("My category")
if err != nil {
t.Fatal(err)
}
err = adminClient.DeleteCategory(category.ID)
if err == nil {
t.Fatalf(`Regular users should not be able to delete categories of other users`)
}
}
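// TestGetCategoriesEndpoint expects a new user to have the default "All" category followed by the category created during the test.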
func TestGetCategoriesEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
category, err := regularUserClient.CreateCategory("My category")
if err != nil {
t.Fatal(err)
}
categories, err := regularUserClient.Categories()
if err != nil {
t.Fatal(err)
}
if len(categories) != 2 {
t.Fatalf(`Invalid number of categories, got %d instead of %d`, len(categories), 2)
}
if categories[0].UserID != regularTestUser.ID {
t.Fatalf(`Invalid userID, got %d`, categories[0].UserID)
}
if categories[0].Title != "All" {
t.Fatalf(`Invalid title, got %q instead of %q`, categories[0].Title, "All")
}
if categories[1].ID != category.ID {
t.Fatalf(`Invalid categoryID, got %d`, categories[1].ID)
}
if categories[1].UserID != regularTestUser.ID {
t.Fatalf(`Invalid userID, got %d`, categories[1].UserID)
}
if categories[1].Title != "My category" {
t.Fatalf(`Invalid title, got %q instead of %q`, categories[1].Title, "My category")
}
}
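// TestMarkCategoryAsReadEndpoint adds a feed to a category, marks the category as read, and verifies that every entry of the feed ends up with the read status.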
func TestMarkCategoryAsReadEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
category, err := regularUserClient.CreateCategory("My category")
if err != nil {
t.Fatal(err)
}
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
CategoryID: category.ID,
})
if err != nil {
t.Fatal(err)
}
if err := regularUserClient.MarkCategoryAsRead(category.ID); err != nil {
t.Fatal(err)
}
results, err := regularUserClient.FeedEntries(feedID, nil)
if err != nil {
t.Fatal(err)
}
for _, entry := range results.Entries {
if entry.Status != miniflux.EntryStatusRead {
t.Errorf(`Status for entry %d was %q instead of %q`, entry.ID, entry.Status, miniflux.EntryStatusRead)
}
}
}
func TestCreateFeedEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
category, err := regularUserClient.CreateCategory("My category")
if err != nil {
t.Fatal(err)
}
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
CategoryID: category.ID,
})
if err != nil {
t.Fatal(err)
}
if feedID == 0 {
t.Errorf(`Invalid feedID, got "%v"`, feedID)
}
}
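// TestCannotCreateDuplicatedFeed verifies that subscribing twice to the same feed URL returns an error.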
func TestCannotCreateDuplicatedFeed(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
if feedID == 0 {
t.Fatalf(`Invalid feedID, got "%v"`, feedID)
}
_, err = regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err == nil {
t.Fatalf(`Duplicated feeds should not be allowed`)
}
}
func TestCreateFeedWithInexistingCategory(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
_, err = regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
CategoryID: 123456789,
})
if err == nil {
t.Fatalf(`Creating a feed with an inexisting category should raise an error`)
}
}
func TestCreateFeedWithEmptyFeedURL(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
_, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: "",
})
if err == nil {
t.Fatalf(`Creating a feed with an empty feed URL should raise an error`)
}
}
func TestCreateFeedWithInvalidFeedURL(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
_, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: "invalid_feed_url",
})
if err == nil {
t.Fatalf(`Creating a feed with an invalid feed URL should raise an error`)
}
}
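// TestCreateDisabledFeed ensures that the Disabled flag provided at creation time is persisted on the feed.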
func TestCreateDisabledFeed(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
Disabled: true,
})
if err != nil {
t.Fatal(err)
}
feed, err := regularUserClient.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if !feed.Disabled {
t.Fatalf(`The feed should be disabled`)
}
}
func TestCreateFeedWithDisabledHTTPCache(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
IgnoreHTTPCache: true,
})
if err != nil {
t.Fatal(err)
}
feed, err := regularUserClient.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if !feed.IgnoreHTTPCache {
t.Fatalf(`The feed should ignore the HTTP cache`)
}
}
func TestCreateFeedWithScraperRule(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
ScraperRules: "article",
})
if err != nil {
t.Fatal(err)
}
feed, err := regularUserClient.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if feed.ScraperRules != "article" {
t.Fatalf(`The feed should have the scraper rules set to "article"`)
}
}
func TestUpdateFeedEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
feedUpdateRequest := &miniflux.FeedModificationRequest{
FeedURL: miniflux.SetOptionalField("https://example.org/feed.xml"),
}
updatedFeed, err := regularUserClient.UpdateFeed(feedID, feedUpdateRequest)
if err != nil {
t.Fatal(err)
}
if updatedFeed.FeedURL != "https://example.org/feed.xml" {
t.Fatalf(`Invalid feed URL, got "%v"`, updatedFeed.FeedURL)
}
}
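// TestCannotHaveDuplicateFeedWhenUpdatingFeed verifies that changing a feed URL to one already used by another subscription of the same user is rejected.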
func TestCannotHaveDuplicateFeedWhenUpdatingFeed(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
if _, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{FeedURL: testConfig.testFeedURL}); err != nil {
t.Fatal(err)
}
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: "https://github.com/miniflux/v2/commits.atom",
})
if err != nil {
t.Fatal(err)
}
feedUpdateRequest := &miniflux.FeedModificationRequest{
FeedURL: miniflux.SetOptionalField(testConfig.testFeedURL),
}
if _, err := regularUserClient.UpdateFeed(feedID, feedUpdateRequest); err == nil {
t.Fatalf(`Duplicated feeds should not be allowed`)
}
}
func TestUpdateFeedWithInvalidCategory(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
feedUpdateRequest := &miniflux.FeedModificationRequest{
CategoryID: miniflux.SetOptionalField(int64(123456789)),
}
if _, err := regularUserClient.UpdateFeed(feedID, feedUpdateRequest); err == nil {
t.Fatalf(`Updating a feed with an inexisting category should raise an error`)
}
}
func TestMarkFeedAsReadEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
if err := regularUserClient.MarkFeedAsRead(feedID); err != nil {
t.Fatal(err)
}
results, err := regularUserClient.FeedEntries(feedID, nil)
if err != nil {
t.Fatalf(`Failed to get updated entries: %v`, err)
}
for _, entry := range results.Entries {
if entry.Status != miniflux.EntryStatusRead {
t.Errorf(`Status for entry %d was %q instead of %q`, entry.ID, entry.Status, miniflux.EntryStatusRead)
}
}
}
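// TestFetchCountersEndpoint checks that a freshly added feed reports no read entries and a non-zero unread counter.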
func TestFetchCountersEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
counters, err := regularUserClient.FetchCounters()
if err != nil {
t.Fatal(err)
}
if value, ok := counters.ReadCounters[feedID]; ok && value != 0 {
t.Errorf(`Invalid read counter, got %d`, value)
}
if value, ok := counters.UnreadCounters[feedID]; !ok || value == 0 {
t.Errorf(`Invalid unread counter, got %d`, value)
}
}
func TestDeleteFeedEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
if err := regularUserClient.DeleteFeed(feedID); err != nil {
t.Fatal(err)
}
}
func TestRefreshAllFeedsEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
if err := regularUserClient.RefreshAllFeeds(); err != nil {
t.Fatal(err)
}
}
func TestRefreshFeedEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
if err := regularUserClient.RefreshFeed(feedID); err != nil {
t.Fatal(err)
}
}
func TestGetFeedEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
feed, err := regularUserClient.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if feed.ID != feedID {
t.Fatalf(`Invalid feedID, got %d`, feed.ID)
}
if feed.FeedURL != testConfig.testFeedURL {
t.Fatalf(`Invalid feed URL, got %q`, feed.FeedURL)
}
if feed.SiteURL != testConfig.testWebsiteURL {
t.Fatalf(`Invalid site URL, got %q`, feed.SiteURL)
}
if feed.Title != testConfig.testFeedTitle {
t.Fatalf(`Invalid title, got %q`, feed.Title)
}
}
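// TestGetFeedIcon fetches the feed icon by feed ID and then by icon ID, verifying that both responses carry a MIME type and non-empty data.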
func TestGetFeedIcon(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
icon, err := regularUserClient.FeedIcon(feedID)
if err != nil {
t.Fatal(err)
}
if icon == nil {
t.Fatalf(`Invalid icon, got nil`)
}
if icon.MimeType == "" {
t.Fatalf(`Invalid mime type, got %q`, icon.MimeType)
}
if len(icon.Data) == 0 {
t.Fatalf(`Invalid data, got empty`)
}
icon, err = regularUserClient.Icon(icon.ID)
if err != nil {
t.Fatal(err)
}
if icon == nil {
t.Fatalf(`Invalid icon, got nil`)
}
if icon.MimeType == "" {
t.Fatalf(`Invalid mime type, got %q`, icon.MimeType)
}
if len(icon.Data) == 0 {
t.Fatalf(`Invalid data, got empty`)
}
}
func TestGetFeedIconWithInexistingFeedID(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
_, err := client.FeedIcon(123456789)
if err == nil {
t.Fatalf(`Fetching the icon of an inexisting feed should raise an error`)
}
}
func TestGetFeedsEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
feeds, err := regularUserClient.Feeds()
if err != nil {
t.Fatal(err)
}
if len(feeds) != 1 {
t.Fatalf(`Invalid number of feeds, got %d`, len(feeds))
}
if feeds[0].ID != feedID {
t.Fatalf(`Invalid feedID, got %d`, feeds[0].ID)
}
if feeds[0].FeedURL != testConfig.testFeedURL {
t.Fatalf(`Invalid feed URL, got %q`, feeds[0].FeedURL)
}
}
func TestGetCategoryFeedsEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
category, err := regularUserClient.CreateCategory("My category")
if err != nil {
t.Fatal(err)
}
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
CategoryID: category.ID,
})
if err != nil {
t.Fatal(err)
}
feeds, err := regularUserClient.CategoryFeeds(category.ID)
if err != nil {
t.Fatal(err)
}
if len(feeds) != 1 {
t.Fatalf(`Invalid number of feeds, got %d`, len(feeds))
}
if feeds[0].ID != feedID {
t.Fatalf(`Invalid feedID, got %d`, feeds[0].ID)
}
if feeds[0].FeedURL != testConfig.testFeedURL {
t.Fatalf(`Invalid feed URL, got %q`, feeds[0].FeedURL)
}
}
func TestExportEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
if _, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{FeedURL: testConfig.testFeedURL}); err != nil {
t.Fatal(err)
}
exportedData, err := regularUserClient.Export()
if err != nil {
t.Fatal(err)
}
if len(exportedData) == 0 {
t.Fatalf(`Invalid exported data, got empty`)
}
if !strings.HasPrefix(string(exportedData), "<?xml") {
t.Fatalf(`Invalid OPML export, got %q`, string(exportedData))
}
}
func TestImportEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
data := `<?xml version="1.0" encoding="UTF-8"?>
<opml version="2.0">
<body>
<outline text="Test Category">
<outline title="Test Feed" text="Test Feed" xmlUrl="` + testConfig.testFeedURL + `" htmlUrl="` + testConfig.testWebsiteURL + `"></outline>
</outline>
</body>
</opml>
`
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
bytesReader := bytes.NewReader([]byte(data))
if err := regularUserClient.Import(io.NopCloser(bytesReader)); err != nil {
t.Fatal(err)
}
}
func TestDiscoverSubscriptionsEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
subscriptions, err := client.Discover(testConfig.testWebsiteURL)
if err != nil {
t.Fatal(err)
}
if len(subscriptions) == 0 {
t.Fatalf(`Invalid number of subscriptions, got %d`, len(subscriptions))
}
if subscriptions[0].Title != testConfig.testSubscriptionTitle {
t.Fatalf(`Invalid title, got %q`, subscriptions[0].Title)
}
if subscriptions[0].URL != testConfig.testFeedURL {
t.Fatalf(`Invalid URL, got %q`, subscriptions[0].URL)
}
}
func TestDiscoverSubscriptionsWithInvalidURL(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
_, err := client.Discover("invalid_url")
if err == nil {
t.Fatalf(`Discovering subscriptions with an invalid URL should raise an error`)
}
}
func TestDiscoverSubscriptionsWithNoSubscription(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
client := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
if _, err := client.Discover(testConfig.testBaseURL); !errors.Is(err, miniflux.ErrNotFound) {
t.Fatalf(`Discovering subscriptions with no subscription should raise a 404 error`)
}
}
func TestGetAllFeedEntriesEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
results, err := regularUserClient.FeedEntries(feedID, nil)
if err != nil {
t.Fatal(err)
}
if len(results.Entries) == 0 {
t.Fatalf(`Invalid number of entries, got %d`, len(results.Entries))
}
if results.Total == 0 {
t.Fatalf(`Invalid total, got %d`, results.Total)
}
if results.Entries[0].FeedID != feedID {
t.Fatalf(`Invalid feedID, got %d`, results.Entries[0].FeedID)
}
if results.Entries[0].Feed.FeedURL != testConfig.testFeedURL {
t.Fatalf(`Invalid feed URL, got %q`, results.Entries[0].Feed.FeedURL)
}
if results.Entries[0].Title == "" {
t.Fatalf(`Invalid title, got empty`)
}
}
func TestGetAllCategoryEntriesEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
category, err := regularUserClient.CreateCategory("My category")
if err != nil {
t.Fatal(err)
}
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
CategoryID: category.ID,
})
if err != nil {
t.Fatal(err)
}
results, err := regularUserClient.CategoryEntries(category.ID, nil)
if err != nil {
t.Fatal(err)
}
if len(results.Entries) == 0 {
t.Fatalf(`Invalid number of entries, got %d`, len(results.Entries))
}
if results.Total == 0 {
t.Fatalf(`Invalid total, got %d`, results.Total)
}
if results.Entries[0].FeedID != feedID {
t.Fatalf(`Invalid feedID, got %d`, results.Entries[0].FeedID)
}
if results.Entries[0].Feed.FeedURL != testConfig.testFeedURL {
t.Fatalf(`Invalid feed URL, got %q`, results.Entries[0].Feed.FeedURL)
}
if results.Entries[0].Title == "" {
t.Fatalf(`Invalid title, got empty`)
}
}
func TestGetAllEntriesEndpointWithFilter(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
feedEntries, err := regularUserClient.Entries(&miniflux.Filter{FeedID: feedID})
if err != nil {
t.Fatal(err)
}
if len(feedEntries.Entries) == 0 {
t.Fatalf(`Invalid number of entries, got %d`, len(feedEntries.Entries))
}
if feedEntries.Total == 0 {
t.Fatalf(`Invalid total, got %d`, feedEntries.Total)
}
if feedEntries.Entries[0].FeedID != feedID {
t.Fatalf(`Invalid feedID, got %d`, feedEntries.Entries[0].FeedID)
}
if feedEntries.Entries[0].Feed.FeedURL != testConfig.testFeedURL {
t.Fatalf(`Invalid feed URL, got %q`, feedEntries.Entries[0].Feed.FeedURL)
}
if feedEntries.Entries[0].Title == "" {
t.Fatalf(`Invalid title, got empty`)
}
recentEntries, err := regularUserClient.Entries(&miniflux.Filter{Order: "published_at", Direction: "desc"})
if err != nil {
t.Fatal(err)
}
if len(recentEntries.Entries) == 0 {
t.Fatalf(`Invalid number of entries, got %d`, len(recentEntries.Entries))
}
if recentEntries.Total == 0 {
t.Fatalf(`Invalid total, got %d`, recentEntries.Total)
}
if feedEntries.Entries[0].Title == recentEntries.Entries[0].Title {
t.Fatalf(`Invalid order, got the same title`)
}
searchedEntries, err := regularUserClient.Entries(&miniflux.Filter{Search: "2.0.8"})
if err != nil {
t.Fatal(err)
}
if searchedEntries.Total != 1 {
t.Fatalf(`Invalid total, got %d`, searchedEntries.Total)
}
if _, err := regularUserClient.Entries(&miniflux.Filter{Status: "invalid"}); err == nil {
t.Fatal(`Using invalid status should raise an error`)
}
if _, err = regularUserClient.Entries(&miniflux.Filter{Direction: "invalid"}); err == nil {
t.Fatal(`Using invalid direction should raise an error`)
}
if _, err = regularUserClient.Entries(&miniflux.Filter{Order: "invalid"}); err == nil {
t.Fatal(`Using invalid order should raise an error`)
}
}
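// TestGetGlobalEntriesEndpoint creates a feed hidden from the global view and checks that the globally_visible filter excludes its entries while an unfiltered query still returns them.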
func TestGetGlobalEntriesEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
HideGlobally: true,
})
if err != nil {
t.Fatal(err)
}
feedIDEntry, err := regularUserClient.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if !feedIDEntry.HideGlobally {
t.Fatalf(`Expected feed to have hide_globally set to true, was false.`)
}
/* Not filtering on GloballyVisible should return all entries */
feedEntries, err := regularUserClient.Entries(&miniflux.Filter{FeedID: feedID})
if err != nil {
t.Fatal(err)
}
if len(feedEntries.Entries) == 0 {
t.Fatalf(`Expected entries but response contained none.`)
}
/* Feed is hidden globally, so this should be empty */
globallyVisibleEntries, err := regularUserClient.Entries(&miniflux.Filter{GloballyVisible: true})
if err != nil {
t.Fatal(err)
}
if len(globallyVisibleEntries.Entries) != 0 {
t.Fatalf(`Expected no entries, got %d`, len(globallyVisibleEntries.Entries))
}
}
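// TestUpdateEnclosureEndpoint updates the media progression of the first enclosure found in the feed and verifies the new value is persisted. The test is skipped when the feed has no enclosures.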
func TestUpdateEnclosureEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
result, err := regularUserClient.FeedEntries(feedID, nil)
if err != nil {
t.Fatalf(`Failed to get entries: %v`, err)
}
var enclosure *miniflux.Enclosure
for _, entry := range result.Entries {
if len(entry.Enclosures) > 0 {
enclosure = entry.Enclosures[0]
break
}
}
if enclosure == nil {
t.Skip(`Skipping test, missing enclosure in feed.`)
}
err = regularUserClient.UpdateEnclosure(enclosure.ID, &miniflux.EnclosureUpdateRequest{
MediaProgression: 20,
})
if err != nil {
t.Fatal(err)
}
updatedEnclosure, err := regularUserClient.Enclosure(enclosure.ID)
if err != nil {
t.Fatal(err)
}
if updatedEnclosure.MediaProgression != 20 {
t.Fatalf(`Failed to update media_progression, expected %d but got %d`, 20, updatedEnclosure.MediaProgression)
}
}
func TestGetEnclosureEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
result, err := regularUserClient.FeedEntries(feedID, nil)
if err != nil {
t.Fatalf(`Failed to get entries: %v`, err)
}
var expectedEnclosure *miniflux.Enclosure
for _, entry := range result.Entries {
if len(entry.Enclosures) > 0 {
expectedEnclosure = entry.Enclosures[0]
break
}
}
if expectedEnclosure == nil {
t.Skip(`Skipping test, missing enclosure in feed.`)
}
enclosure, err := regularUserClient.Enclosure(expectedEnclosure.ID)
if err != nil {
t.Fatal(err)
}
if enclosure.ID != expectedEnclosure.ID {
t.Fatalf(`Invalid enclosureID, got %d while expecting %d`, enclosure.ID, expectedEnclosure.ID)
}
if _, err = regularUserClient.Enclosure(99999); err == nil {
t.Fatalf(`Fetching an inexisting enclosure should raise an error`)
}
}
func TestGetEntryEndpoints(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
result, err := regularUserClient.FeedEntries(feedID, nil)
if err != nil {
t.Fatalf(`Failed to get entries: %v`, err)
}
entry, err := regularUserClient.FeedEntry(feedID, result.Entries[0].ID)
if err != nil {
t.Fatal(err)
}
if entry.ID != result.Entries[0].ID {
t.Fatalf(`Invalid entryID, got %d`, entry.ID)
}
if entry.FeedID != feedID {
t.Fatalf(`Invalid feedID, got %d`, entry.FeedID)
}
if entry.Feed.FeedURL != testConfig.testFeedURL {
t.Fatalf(`Invalid feed URL, got %q`, entry.Feed.FeedURL)
}
entry, err = regularUserClient.Entry(result.Entries[0].ID)
if err != nil {
t.Fatal(err)
}
if entry.ID != result.Entries[0].ID {
t.Fatalf(`Invalid entryID, got %d`, entry.ID)
}
entry, err = regularUserClient.CategoryEntry(result.Entries[0].Feed.Category.ID, result.Entries[0].ID)
if err != nil {
t.Fatal(err)
}
if entry.ID != result.Entries[0].ID {
t.Fatalf(`Invalid entryID, got %d`, entry.ID)
}
}
func TestUpdateEntryStatusEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
result, err := regularUserClient.FeedEntries(feedID, nil)
if err != nil {
t.Fatalf(`Failed to get entries: %v`, err)
}
if err := regularUserClient.UpdateEntries([]int64{result.Entries[0].ID}, miniflux.EntryStatusRead); err != nil {
t.Fatal(err)
}
entry, err := regularUserClient.Entry(result.Entries[0].ID)
if err != nil {
t.Fatal(err)
}
if entry.Status != miniflux.EntryStatusRead {
t.Fatalf(`Invalid status, got %q`, entry.Status)
}
}
func TestUpdateEntryEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
result, err := regularUserClient.FeedEntries(feedID, nil)
if err != nil {
t.Fatalf(`Failed to get entries: %v`, err)
}
entryUpdateRequest := &miniflux.EntryModificationRequest{
Title: miniflux.SetOptionalField("New title"),
Content: miniflux.SetOptionalField("New content"),
}
updatedEntry, err := regularUserClient.UpdateEntry(result.Entries[0].ID, entryUpdateRequest)
if err != nil {
t.Fatal(err)
}
if updatedEntry.Title != "New title" {
t.Errorf(`Invalid title, got %q`, updatedEntry.Title)
}
if updatedEntry.Content != "New content" {
t.Errorf(`Invalid content, got %q`, updatedEntry.Content)
}
entry, err := regularUserClient.Entry(result.Entries[0].ID)
if err != nil {
t.Fatal(err)
}
if entry.Title != "New title" {
t.Errorf(`Invalid title, got %q`, entry.Title)
}
if entry.Content != "New content" {
t.Errorf(`Invalid content, got %q`, entry.Content)
}
}
func TestToggleBookmarkEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
result, err := regularUserClient.FeedEntries(feedID, &miniflux.Filter{Limit: 1})
if err != nil {
t.Fatalf(`Failed to get entries: %v`, err)
}
if err := regularUserClient.ToggleBookmark(result.Entries[0].ID); err != nil {
t.Fatal(err)
}
entry, err := regularUserClient.Entry(result.Entries[0].ID)
if err != nil {
t.Fatal(err)
}
if !entry.Starred {
t.Fatalf(`The entry should be bookmarked`)
}
}
func TestSaveEntryEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
result, err := regularUserClient.FeedEntries(feedID, &miniflux.Filter{Limit: 1})
if err != nil {
t.Fatalf(`Failed to get entries: %v`, err)
}
if err := regularUserClient.SaveEntry(result.Entries[0].ID); !errors.Is(err, miniflux.ErrBadRequest) {
t.Fatalf(`Saving an entry should raise a bad request error because no integration is configured`)
}
}
func TestFetchIntegrationsStatusEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
hasIntegrations, err := regularUserClient.IntegrationsStatus()
if err != nil {
t.Fatalf("Failed to fetch integrations status: %v", err)
}
if hasIntegrations {
t.Fatalf("New user should not have integrations configured")
}
}
func TestFetchContentEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
result, err := regularUserClient.FeedEntries(feedID, &miniflux.Filter{Limit: 1})
if err != nil {
t.Fatalf(`Failed to get entries: %v`, err)
}
content, err := regularUserClient.FetchEntryOriginalContent(result.Entries[0].ID)
if err != nil {
t.Fatal(err)
}
if content == "" {
t.Fatalf(`Invalid content, got empty`)
}
}
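// TestFlushHistoryEndpoint marks two entries as read, flushes the history, and verifies that the read entries are moved to the removed status.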
func TestFlushHistoryEndpoint(t *testing.T) {
testConfig := newIntegrationTestConfig()
if !testConfig.isConfigured() {
t.Skip(skipIntegrationTestsMessage)
}
adminClient := miniflux.NewClient(testConfig.testBaseURL, testConfig.testAdminUsername, testConfig.testAdminPassword)
regularTestUser, err := adminClient.CreateUser(testConfig.genRandomUsername(), testConfig.testRegularPassword, false)
if err != nil {
t.Fatal(err)
}
defer adminClient.DeleteUser(regularTestUser.ID)
regularUserClient := miniflux.NewClient(testConfig.testBaseURL, regularTestUser.Username, testConfig.testRegularPassword)
feedID, err := regularUserClient.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testConfig.testFeedURL,
})
if err != nil {
t.Fatal(err)
}
result, err := regularUserClient.FeedEntries(feedID, &miniflux.Filter{Limit: 3})
if err != nil {
t.Fatalf(`Failed to get entries: %v`, err)
}
if err := regularUserClient.UpdateEntries([]int64{result.Entries[0].ID, result.Entries[1].ID}, miniflux.EntryStatusRead); err != nil {
t.Fatal(err)
}
if err := regularUserClient.FlushHistory(); err != nil {
t.Fatal(err)
}
readEntries, err := regularUserClient.Entries(&miniflux.Filter{Status: miniflux.EntryStatusRead})
if err != nil {
t.Fatal(err)
}
if readEntries.Total != 0 {
t.Fatalf(`Invalid total, got %d`, readEntries.Total)
}
removedEntries, err := regularUserClient.Entries(&miniflux.Filter{Status: miniflux.EntryStatusRemoved})
if err != nil {
t.Fatal(err)
}
if removedEntries.Total != 2 {
t.Fatalf(`Invalid total, got %d`, removedEntries.Total)
}
}
v2-2.2.6/internal/api/category.go 0000664 0000000 0000000 00000007640 14756465373 0016634 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
json_parser "encoding/json"
"log/slog"
"net/http"
"time"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/validator"
)
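// createCategory decodes and validates the category creation request body, then returns the newly created category with a 201 Created response.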
func (h *handler) createCategory(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
var categoryRequest model.CategoryRequest
if err := json_parser.NewDecoder(r.Body).Decode(&categoryRequest); err != nil {
json.BadRequest(w, r, err)
return
}
if validationErr := validator.ValidateCategoryCreation(h.store, userID, &categoryRequest); validationErr != nil {
json.BadRequest(w, r, validationErr.Error())
return
}
category, err := h.store.CreateCategory(userID, &categoryRequest)
if err != nil {
json.ServerError(w, r, err)
return
}
json.Created(w, r, category)
}
func (h *handler) updateCategory(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
categoryID := request.RouteInt64Param(r, "categoryID")
category, err := h.store.Category(userID, categoryID)
if err != nil {
json.ServerError(w, r, err)
return
}
if category == nil {
json.NotFound(w, r)
return
}
var categoryRequest model.CategoryRequest
if err := json_parser.NewDecoder(r.Body).Decode(&categoryRequest); err != nil {
json.BadRequest(w, r, err)
return
}
if validationErr := validator.ValidateCategoryModification(h.store, userID, category.ID, &categoryRequest); validationErr != nil {
json.BadRequest(w, r, validationErr.Error())
return
}
categoryRequest.Patch(category)
err = h.store.UpdateCategory(category)
if err != nil {
json.ServerError(w, r, err)
return
}
json.Created(w, r, category)
}
func (h *handler) markCategoryAsRead(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
categoryID := request.RouteInt64Param(r, "categoryID")
category, err := h.store.Category(userID, categoryID)
if err != nil {
json.ServerError(w, r, err)
return
}
if category == nil {
json.NotFound(w, r)
return
}
if err = h.store.MarkCategoryAsRead(userID, categoryID, time.Now()); err != nil {
json.ServerError(w, r, err)
return
}
json.NoContent(w, r)
}
func (h *handler) getCategories(w http.ResponseWriter, r *http.Request) {
var categories model.Categories
var err error
includeCounts := request.QueryStringParam(r, "counts", "false")
if includeCounts == "true" {
categories, err = h.store.CategoriesWithFeedCount(request.UserID(r))
} else {
categories, err = h.store.Categories(request.UserID(r))
}
if err != nil {
json.ServerError(w, r, err)
return
}
json.OK(w, r, categories)
}
func (h *handler) removeCategory(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
categoryID := request.RouteInt64Param(r, "categoryID")
if !h.store.CategoryIDExists(userID, categoryID) {
json.NotFound(w, r)
return
}
if err := h.store.RemoveCategory(userID, categoryID); err != nil {
json.ServerError(w, r, err)
return
}
json.NoContent(w, r)
}
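// refreshCategory builds a batch of refresh jobs for the enabled feeds of the category whose next check is expired and pushes the batch to the worker pool asynchronously.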
func (h *handler) refreshCategory(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
categoryID := request.RouteInt64Param(r, "categoryID")
batchBuilder := h.store.NewBatchBuilder()
batchBuilder.WithErrorLimit(config.Opts.PollingParsingErrorLimit())
batchBuilder.WithoutDisabledFeeds()
batchBuilder.WithUserID(userID)
batchBuilder.WithCategoryID(categoryID)
batchBuilder.WithNextCheckExpired()
jobs, err := batchBuilder.FetchJobs()
if err != nil {
json.ServerError(w, r, err)
return
}
slog.Info(
"Triggered a manual refresh of all feeds for a given category from the API",
slog.Int64("user_id", userID),
slog.Int64("category_id", categoryID),
slog.Int("nb_jobs", len(jobs)),
)
go h.pool.Push(jobs)
json.NoContent(w, r)
}
v2-2.2.6/internal/api/enclosure.go 0000664 0000000 0000000 00000003373 14756465373 0017015 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
json_parser "encoding/json"
"net/http"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/validator"
)
func (h *handler) getEnclosureByID(w http.ResponseWriter, r *http.Request) {
enclosureID := request.RouteInt64Param(r, "enclosureID")
enclosure, err := h.store.GetEnclosure(enclosureID)
if err != nil {
json.ServerError(w, r, err)
return
}
if enclosure == nil {
json.NotFound(w, r)
return
}
userID := request.UserID(r)
if enclosure.UserID != userID {
json.NotFound(w, r)
return
}
enclosure.ProxifyEnclosureURL(h.router)
json.OK(w, r, enclosure)
}
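// updateEnclosureByID updates the media progression of an enclosure. The enclosure must exist and belong to the authenticated user, otherwise a 404 is returned.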
func (h *handler) updateEnclosureByID(w http.ResponseWriter, r *http.Request) {
enclosureID := request.RouteInt64Param(r, "enclosureID")
var enclosureUpdateRequest model.EnclosureUpdateRequest
if err := json_parser.NewDecoder(r.Body).Decode(&enclosureUpdateRequest); err != nil {
json.BadRequest(w, r, err)
return
}
if err := validator.ValidateEnclosureUpdateRequest(&enclosureUpdateRequest); err != nil {
json.BadRequest(w, r, err)
return
}
enclosure, err := h.store.GetEnclosure(enclosureID)
if err != nil {
json.ServerError(w, r, err)
return
}
if enclosure == nil {
json.NotFound(w, r)
return
}
userID := request.UserID(r)
if enclosure.UserID != userID {
json.NotFound(w, r)
return
}
enclosure.MediaProgression = enclosureUpdateRequest.MediaProgression
if err := h.store.UpdateEnclosure(enclosure); err != nil {
json.ServerError(w, r, err)
return
}
json.NoContent(w, r)
}
v2-2.2.6/internal/api/entry.go 0000664 0000000 0000000 00000024512 14756465373 0016155 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
json_parser "encoding/json"
"errors"
"net/http"
"strconv"
"time"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/integration"
"miniflux.app/v2/internal/mediaproxy"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/processor"
"miniflux.app/v2/internal/reader/readingtime"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/validator"
)
func (h *handler) getEntryFromBuilder(w http.ResponseWriter, r *http.Request, b *storage.EntryQueryBuilder) {
entry, err := b.GetEntry()
if err != nil {
json.ServerError(w, r, err)
return
}
if entry == nil {
json.NotFound(w, r)
return
}
entry.Content = mediaproxy.RewriteDocumentWithAbsoluteProxyURL(h.router, entry.Content)
entry.Enclosures.ProxifyEnclosureURL(h.router)
json.OK(w, r, entry)
}
func (h *handler) getFeedEntry(w http.ResponseWriter, r *http.Request) {
feedID := request.RouteInt64Param(r, "feedID")
entryID := request.RouteInt64Param(r, "entryID")
builder := h.store.NewEntryQueryBuilder(request.UserID(r))
builder.WithFeedID(feedID)
builder.WithEntryID(entryID)
h.getEntryFromBuilder(w, r, builder)
}
func (h *handler) getCategoryEntry(w http.ResponseWriter, r *http.Request) {
categoryID := request.RouteInt64Param(r, "categoryID")
entryID := request.RouteInt64Param(r, "entryID")
builder := h.store.NewEntryQueryBuilder(request.UserID(r))
builder.WithCategoryID(categoryID)
builder.WithEntryID(entryID)
h.getEntryFromBuilder(w, r, builder)
}
func (h *handler) getEntry(w http.ResponseWriter, r *http.Request) {
entryID := request.RouteInt64Param(r, "entryID")
builder := h.store.NewEntryQueryBuilder(request.UserID(r))
builder.WithEntryID(entryID)
h.getEntryFromBuilder(w, r, builder)
}
func (h *handler) getFeedEntries(w http.ResponseWriter, r *http.Request) {
feedID := request.RouteInt64Param(r, "feedID")
h.findEntries(w, r, feedID, 0)
}
func (h *handler) getCategoryEntries(w http.ResponseWriter, r *http.Request) {
categoryID := request.RouteInt64Param(r, "categoryID")
h.findEntries(w, r, 0, categoryID)
}
func (h *handler) getEntries(w http.ResponseWriter, r *http.Request) {
h.findEntries(w, r, 0, 0)
}
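// findEntries is shared by the feed, category, and global entry listing handlers. It validates the status, order, direction, and pagination parameters, applies the optional feed, category, tag, and visibility filters, and returns the matching entries along with the total count.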
func (h *handler) findEntries(w http.ResponseWriter, r *http.Request, feedID int64, categoryID int64) {
statuses := request.QueryStringParamList(r, "status")
for _, status := range statuses {
if err := validator.ValidateEntryStatus(status); err != nil {
json.BadRequest(w, r, err)
return
}
}
order := request.QueryStringParam(r, "order", model.DefaultSortingOrder)
if err := validator.ValidateEntryOrder(order); err != nil {
json.BadRequest(w, r, err)
return
}
direction := request.QueryStringParam(r, "direction", model.DefaultSortingDirection)
if err := validator.ValidateDirection(direction); err != nil {
json.BadRequest(w, r, err)
return
}
limit := request.QueryIntParam(r, "limit", 100)
offset := request.QueryIntParam(r, "offset", 0)
if err := validator.ValidateRange(offset, limit); err != nil {
json.BadRequest(w, r, err)
return
}
userID := request.UserID(r)
categoryID = request.QueryInt64Param(r, "category_id", categoryID)
if categoryID > 0 && !h.store.CategoryIDExists(userID, categoryID) {
json.BadRequest(w, r, errors.New("invalid category ID"))
return
}
feedID = request.QueryInt64Param(r, "feed_id", feedID)
if feedID > 0 && !h.store.FeedExists(userID, feedID) {
json.BadRequest(w, r, errors.New("invalid feed ID"))
return
}
tags := request.QueryStringParamList(r, "tags")
builder := h.store.NewEntryQueryBuilder(userID)
builder.WithFeedID(feedID)
builder.WithCategoryID(categoryID)
builder.WithStatuses(statuses)
builder.WithSorting(order, direction)
builder.WithOffset(offset)
builder.WithLimit(limit)
builder.WithTags(tags)
builder.WithEnclosures()
if request.HasQueryParam(r, "globally_visible") {
globallyVisible := request.QueryBoolParam(r, "globally_visible", true)
if globallyVisible {
builder.WithGloballyVisible()
}
}
configureFilters(builder, r)
entries, err := builder.GetEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
count, err := builder.CountEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
for i := range entries {
entries[i].Content = mediaproxy.RewriteDocumentWithAbsoluteProxyURL(h.router, entries[i].Content)
}
json.OK(w, r, &entriesResponse{Total: count, Entries: entries})
}
func (h *handler) setEntryStatus(w http.ResponseWriter, r *http.Request) {
var entriesStatusUpdateRequest model.EntriesStatusUpdateRequest
if err := json_parser.NewDecoder(r.Body).Decode(&entriesStatusUpdateRequest); err != nil {
json.BadRequest(w, r, err)
return
}
if err := validator.ValidateEntriesStatusUpdateRequest(&entriesStatusUpdateRequest); err != nil {
json.BadRequest(w, r, err)
return
}
if err := h.store.SetEntriesStatus(request.UserID(r), entriesStatusUpdateRequest.EntryIDs, entriesStatusUpdateRequest.Status); err != nil {
json.ServerError(w, r, err)
return
}
json.NoContent(w, r)
}
func (h *handler) toggleBookmark(w http.ResponseWriter, r *http.Request) {
entryID := request.RouteInt64Param(r, "entryID")
if err := h.store.ToggleBookmark(request.UserID(r), entryID); err != nil {
json.ServerError(w, r, err)
return
}
json.NoContent(w, r)
}
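// saveEntry forwards an entry to the user's third-party integrations; the delivery itself runs asynchronously.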
func (h *handler) saveEntry(w http.ResponseWriter, r *http.Request) {
entryID := request.RouteInt64Param(r, "entryID")
builder := h.store.NewEntryQueryBuilder(request.UserID(r))
builder.WithEntryID(entryID)
builder.WithoutStatus(model.EntryStatusRemoved)
if !h.store.HasSaveEntry(request.UserID(r)) {
json.BadRequest(w, r, errors.New("no third-party integration enabled"))
return
}
entry, err := builder.GetEntry()
if err != nil {
json.ServerError(w, r, err)
return
}
if entry == nil {
json.NotFound(w, r)
return
}
settings, err := h.store.Integration(request.UserID(r))
if err != nil {
json.ServerError(w, r, err)
return
}
go integration.SendEntry(entry, settings)
json.Accepted(w, r)
}
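// updateEntry applies a title/content modification to an entry and recomputes its reading time when the user has reading times enabled.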
func (h *handler) updateEntry(w http.ResponseWriter, r *http.Request) {
var entryUpdateRequest model.EntryUpdateRequest
if err := json_parser.NewDecoder(r.Body).Decode(&entryUpdateRequest); err != nil {
json.BadRequest(w, r, err)
return
}
if err := validator.ValidateEntryModification(&entryUpdateRequest); err != nil {
json.BadRequest(w, r, err)
return
}
loggedUserID := request.UserID(r)
entryID := request.RouteInt64Param(r, "entryID")
entryBuilder := h.store.NewEntryQueryBuilder(loggedUserID)
entryBuilder.WithEntryID(entryID)
entryBuilder.WithoutStatus(model.EntryStatusRemoved)
entry, err := entryBuilder.GetEntry()
if err != nil {
json.ServerError(w, r, err)
return
}
if entry == nil {
json.NotFound(w, r)
return
}
user, err := h.store.UserByID(loggedUserID)
if err != nil {
json.ServerError(w, r, err)
return
}
if user == nil {
json.NotFound(w, r)
return
}
entryUpdateRequest.Patch(entry)
if user.ShowReadingTime {
entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
}
if err := h.store.UpdateEntryTitleAndContent(entry); err != nil {
json.ServerError(w, r, err)
return
}
json.Created(w, r, entry)
}
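// fetchContent downloads the original web page of an entry and returns the scraped content.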
func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) {
loggedUserID := request.UserID(r)
entryID := request.RouteInt64Param(r, "entryID")
entryBuilder := h.store.NewEntryQueryBuilder(loggedUserID)
entryBuilder.WithEntryID(entryID)
entryBuilder.WithoutStatus(model.EntryStatusRemoved)
entry, err := entryBuilder.GetEntry()
if err != nil {
json.ServerError(w, r, err)
return
}
if entry == nil {
json.NotFound(w, r)
return
}
user, err := h.store.UserByID(loggedUserID)
if err != nil {
json.ServerError(w, r, err)
return
}
if user == nil {
json.NotFound(w, r)
return
}
feedBuilder := storage.NewFeedQueryBuilder(h.store, loggedUserID)
feedBuilder.WithFeedID(entry.FeedID)
feed, err := feedBuilder.GetFeed()
if err != nil {
json.ServerError(w, r, err)
return
}
if feed == nil {
json.NotFound(w, r)
return
}
if err := processor.ProcessEntryWebPage(feed, entry, user); err != nil {
json.ServerError(w, r, err)
return
}
json.OK(w, r, map[string]string{"content": entry.Content})
}
func (h *handler) flushHistory(w http.ResponseWriter, r *http.Request) {
loggedUserID := request.UserID(r)
go h.store.FlushHistory(loggedUserID)
json.Accepted(w, r)
}
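// configureFilters applies the optional query-string filters (before_entry_id, after_entry_id,
// published/changed date ranges, category_id, starred and search) to the entry query builder.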
func configureFilters(builder *storage.EntryQueryBuilder, r *http.Request) {
if beforeEntryID := request.QueryInt64Param(r, "before_entry_id", 0); beforeEntryID > 0 {
builder.BeforeEntryID(beforeEntryID)
}
if afterEntryID := request.QueryInt64Param(r, "after_entry_id", 0); afterEntryID > 0 {
builder.AfterEntryID(afterEntryID)
}
if beforePublishedTimestamp := request.QueryInt64Param(r, "before", 0); beforePublishedTimestamp > 0 {
builder.BeforePublishedDate(time.Unix(beforePublishedTimestamp, 0))
}
if afterPublishedTimestamp := request.QueryInt64Param(r, "after", 0); afterPublishedTimestamp > 0 {
builder.AfterPublishedDate(time.Unix(afterPublishedTimestamp, 0))
}
if beforePublishedTimestamp := request.QueryInt64Param(r, "published_before", 0); beforePublishedTimestamp > 0 {
builder.BeforePublishedDate(time.Unix(beforePublishedTimestamp, 0))
}
if afterPublishedTimestamp := request.QueryInt64Param(r, "published_after", 0); afterPublishedTimestamp > 0 {
builder.AfterPublishedDate(time.Unix(afterPublishedTimestamp, 0))
}
if beforeChangedTimestamp := request.QueryInt64Param(r, "changed_before", 0); beforeChangedTimestamp > 0 {
builder.BeforeChangedDate(time.Unix(beforeChangedTimestamp, 0))
}
if afterChangedTimestamp := request.QueryInt64Param(r, "changed_after", 0); afterChangedTimestamp > 0 {
builder.AfterChangedDate(time.Unix(afterChangedTimestamp, 0))
}
if categoryID := request.QueryInt64Param(r, "category_id", 0); categoryID > 0 {
builder.WithCategoryID(categoryID)
}
if request.HasQueryParam(r, "starred") {
starred, err := strconv.ParseBool(r.URL.Query().Get("starred"))
if err == nil {
builder.WithStarred(starred)
}
}
if searchQuery := request.QueryStringParam(r, "search", ""); searchQuery != "" {
builder.WithSearchQuery(searchQuery)
}
}
v2-2.2.6/internal/api/feed.go 0000664 0000000 0000000 00000012703 14756465373 0015716 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
json_parser "encoding/json"
"log/slog"
"net/http"
"time"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/model"
feedHandler "miniflux.app/v2/internal/reader/handler"
"miniflux.app/v2/internal/validator"
)
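// createFeed subscribes the authenticated user to a new feed, falling back to the user's first category when none is provided.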
func (h *handler) createFeed(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
var feedCreationRequest model.FeedCreationRequest
if err := json_parser.NewDecoder(r.Body).Decode(&feedCreationRequest); err != nil {
json.BadRequest(w, r, err)
return
}
// Make the feed category optional for clients that do not support categories.
if feedCreationRequest.CategoryID == 0 {
category, err := h.store.FirstCategory(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
feedCreationRequest.CategoryID = category.ID
}
if validationErr := validator.ValidateFeedCreation(h.store, userID, &feedCreationRequest); validationErr != nil {
json.BadRequest(w, r, validationErr.Error())
return
}
feed, localizedError := feedHandler.CreateFeed(h.store, userID, &feedCreationRequest)
if localizedError != nil {
json.ServerError(w, r, localizedError.Error())
return
}
json.Created(w, r, &feedCreationResponse{FeedID: feed.ID})
}
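// refreshFeed refreshes a single feed owned by the authenticated user.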
func (h *handler) refreshFeed(w http.ResponseWriter, r *http.Request) {
feedID := request.RouteInt64Param(r, "feedID")
userID := request.UserID(r)
if !h.store.FeedExists(userID, feedID) {
json.NotFound(w, r)
return
}
localizedError := feedHandler.RefreshFeed(h.store, userID, feedID, false)
if localizedError != nil {
json.ServerError(w, r, localizedError.Error())
return
}
json.NoContent(w, r)
}
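// refreshAllFeeds queues a background refresh job for every enabled feed of the user that is due for an update.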
func (h *handler) refreshAllFeeds(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
batchBuilder := h.store.NewBatchBuilder()
batchBuilder.WithErrorLimit(config.Opts.PollingParsingErrorLimit())
batchBuilder.WithoutDisabledFeeds()
batchBuilder.WithNextCheckExpired()
batchBuilder.WithUserID(userID)
jobs, err := batchBuilder.FetchJobs()
if err != nil {
json.ServerError(w, r, err)
return
}
slog.Info(
"Triggered a manual refresh of all feeds from the API",
slog.Int64("user_id", userID),
slog.Int("nb_jobs", len(jobs)),
)
go h.pool.Push(jobs)
json.NoContent(w, r)
}
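// updateFeed applies a partial modification to a feed, resets its error counter, and returns the updated record.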
func (h *handler) updateFeed(w http.ResponseWriter, r *http.Request) {
var feedModificationRequest model.FeedModificationRequest
if err := json_parser.NewDecoder(r.Body).Decode(&feedModificationRequest); err != nil {
json.BadRequest(w, r, err)
return
}
userID := request.UserID(r)
feedID := request.RouteInt64Param(r, "feedID")
originalFeed, err := h.store.FeedByID(userID, feedID)
if err != nil {
json.NotFound(w, r)
return
}
if originalFeed == nil {
json.NotFound(w, r)
return
}
if validationErr := validator.ValidateFeedModification(h.store, userID, originalFeed.ID, &feedModificationRequest); validationErr != nil {
json.BadRequest(w, r, validationErr.Error())
return
}
feedModificationRequest.Patch(originalFeed)
originalFeed.ResetErrorCounter()
if err := h.store.UpdateFeed(originalFeed); err != nil {
json.ServerError(w, r, err)
return
}
originalFeed, err = h.store.FeedByID(userID, feedID)
if err != nil {
json.ServerError(w, r, err)
return
}
json.Created(w, r, originalFeed)
}
func (h *handler) markFeedAsRead(w http.ResponseWriter, r *http.Request) {
feedID := request.RouteInt64Param(r, "feedID")
userID := request.UserID(r)
feed, err := h.store.FeedByID(userID, feedID)
if err != nil {
json.NotFound(w, r)
return
}
if feed == nil {
json.NotFound(w, r)
return
}
if err := h.store.MarkFeedAsRead(userID, feedID, time.Now()); err != nil {
json.ServerError(w, r, err)
return
}
json.NoContent(w, r)
}
func (h *handler) getCategoryFeeds(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
categoryID := request.RouteInt64Param(r, "categoryID")
category, err := h.store.Category(userID, categoryID)
if err != nil {
json.ServerError(w, r, err)
return
}
if category == nil {
json.NotFound(w, r)
return
}
feeds, err := h.store.FeedsByCategoryWithCounters(userID, categoryID)
if err != nil {
json.ServerError(w, r, err)
return
}
json.OK(w, r, feeds)
}
func (h *handler) getFeeds(w http.ResponseWriter, r *http.Request) {
feeds, err := h.store.Feeds(request.UserID(r))
if err != nil {
json.ServerError(w, r, err)
return
}
json.OK(w, r, feeds)
}
func (h *handler) fetchCounters(w http.ResponseWriter, r *http.Request) {
counters, err := h.store.FetchCounters(request.UserID(r))
if err != nil {
json.ServerError(w, r, err)
return
}
json.OK(w, r, counters)
}
func (h *handler) getFeed(w http.ResponseWriter, r *http.Request) {
feedID := request.RouteInt64Param(r, "feedID")
feed, err := h.store.FeedByID(request.UserID(r), feedID)
if err != nil {
json.ServerError(w, r, err)
return
}
if feed == nil {
json.NotFound(w, r)
return
}
json.OK(w, r, feed)
}
func (h *handler) removeFeed(w http.ResponseWriter, r *http.Request) {
feedID := request.RouteInt64Param(r, "feedID")
userID := request.UserID(r)
if !h.store.FeedExists(userID, feedID) {
json.NotFound(w, r)
return
}
if err := h.store.RemoveFeed(userID, feedID); err != nil {
json.ServerError(w, r, err)
return
}
json.NoContent(w, r)
}
v2-2.2.6/internal/api/icon.go 0000664 0000000 0000000 00000002217 14756465373 0015742 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
"net/http"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
)
func (h *handler) getIconByFeedID(w http.ResponseWriter, r *http.Request) {
feedID := request.RouteInt64Param(r, "feedID")
if !h.store.HasFeedIcon(feedID) {
json.NotFound(w, r)
return
}
icon, err := h.store.IconByFeedID(request.UserID(r), feedID)
if err != nil {
json.ServerError(w, r, err)
return
}
if icon == nil {
json.NotFound(w, r)
return
}
json.OK(w, r, &feedIconResponse{
ID: icon.ID,
MimeType: icon.MimeType,
Data: icon.DataURL(),
})
}
func (h *handler) getIconByIconID(w http.ResponseWriter, r *http.Request) {
iconID := request.RouteInt64Param(r, "iconID")
icon, err := h.store.IconByID(iconID)
if err != nil {
json.ServerError(w, r, err)
return
}
if icon == nil {
json.NotFound(w, r)
return
}
json.OK(w, r, &feedIconResponse{
ID: icon.ID,
MimeType: icon.MimeType,
Data: icon.DataURL(),
})
}
v2-2.2.6/internal/api/middleware.go 0000664 0000000 0000000 00000011530 14756465373 0017125 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
"context"
"log/slog"
"net/http"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/storage"
)
type middleware struct {
store *storage.Storage
}
func newMiddleware(s *storage.Storage) *middleware {
return &middleware{s}
}
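// handleCORS adds permissive CORS headers and answers preflight OPTIONS requests directly.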
func (m *middleware) handleCORS(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Access-Control-Allow-Origin", "*")
w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
w.Header().Set("Access-Control-Allow-Headers", "X-Auth-Token, Authorization, Content-Type, Accept")
if r.Method == http.MethodOptions {
w.Header().Set("Access-Control-Max-Age", "3600")
w.WriteHeader(http.StatusOK)
return
}
next.ServeHTTP(w, r)
})
}
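// apiKeyAuth authenticates requests carrying an X-Auth-Token header; requests without a token fall through to the next handler.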
func (m *middleware) apiKeyAuth(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
clientIP := request.ClientIP(r)
token := r.Header.Get("X-Auth-Token")
if token == "" {
slog.Debug("[API] Skipped API token authentication because no API Key has been provided",
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
next.ServeHTTP(w, r)
return
}
user, err := m.store.UserByAPIKey(token)
if err != nil {
json.ServerError(w, r, err)
return
}
if user == nil {
slog.Warn("[API] No user found with the provided API key",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
json.Unauthorized(w, r)
return
}
slog.Info("[API] User authenticated successfully with the API Token Authentication",
slog.Bool("authentication_successful", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.String("username", user.Username),
)
m.store.SetLastLogin(user.ID)
m.store.SetAPIKeyUsedTimestamp(user.ID, token)
ctx := r.Context()
ctx = context.WithValue(ctx, request.UserIDContextKey, user.ID)
ctx = context.WithValue(ctx, request.UserTimezoneContextKey, user.Timezone)
ctx = context.WithValue(ctx, request.IsAdminUserContextKey, user.IsAdmin)
ctx = context.WithValue(ctx, request.IsAuthenticatedContextKey, true)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
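// basicAuth authenticates requests with HTTP Basic credentials unless the request was already authenticated upstream.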
func (m *middleware) basicAuth(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if request.IsAuthenticated(r) {
next.ServeHTTP(w, r)
return
}
w.Header().Set("WWW-Authenticate", `Basic realm="Restricted"`)
clientIP := request.ClientIP(r)
username, password, authOK := r.BasicAuth()
if !authOK {
slog.Warn("[API] No Basic HTTP Authentication header sent with the request",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
json.Unauthorized(w, r)
return
}
if username == "" || password == "" {
slog.Warn("[API] Empty username or password provided during Basic HTTP Authentication",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
json.Unauthorized(w, r)
return
}
if err := m.store.CheckPassword(username, password); err != nil {
slog.Warn("[API] Invalid username or password provided during Basic HTTP Authentication",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.String("username", username),
)
json.Unauthorized(w, r)
return
}
user, err := m.store.UserByUsername(username)
if err != nil {
json.ServerError(w, r, err)
return
}
if user == nil {
slog.Warn("[API] User not found while using Basic HTTP Authentication",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.String("username", username),
)
json.Unauthorized(w, r)
return
}
slog.Info("[API] User authenticated successfully with the Basic HTTP Authentication",
slog.Bool("authentication_successful", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.String("username", username),
)
m.store.SetLastLogin(user.ID)
ctx := r.Context()
ctx = context.WithValue(ctx, request.UserIDContextKey, user.ID)
ctx = context.WithValue(ctx, request.UserTimezoneContextKey, user.Timezone)
ctx = context.WithValue(ctx, request.IsAdminUserContextKey, user.IsAdmin)
ctx = context.WithValue(ctx, request.IsAuthenticatedContextKey, true)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
v2-2.2.6/internal/api/opml.go 0000664 0000000 0000000 00000001705 14756465373 0015762 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
"net/http"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/http/response/xml"
"miniflux.app/v2/internal/reader/opml"
)
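// exportFeeds returns the user's subscriptions as an OPML document.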
func (h *handler) exportFeeds(w http.ResponseWriter, r *http.Request) {
opmlHandler := opml.NewHandler(h.store)
opmlExport, err := opmlHandler.Export(request.UserID(r))
if err != nil {
json.ServerError(w, r, err)
return
}
xml.OK(w, r, opmlExport)
}
func (h *handler) importFeeds(w http.ResponseWriter, r *http.Request) {
opmlHandler := opml.NewHandler(h.store)
err := opmlHandler.Import(request.UserID(r), r.Body)
defer r.Body.Close()
if err != nil {
json.ServerError(w, r, err)
return
}
json.Created(w, r, map[string]string{"message": "Feeds imported successfully"})
}
v2-2.2.6/internal/api/payload.go 0000664 0000000 0000000 00000001445 14756465373 0016445 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
"miniflux.app/v2/internal/model"
)
type feedIconResponse struct {
ID int64 `json:"id"`
MimeType string `json:"mime_type"`
Data string `json:"data"`
}
type entriesResponse struct {
Total int `json:"total"`
Entries model.Entries `json:"entries"`
}
type feedCreationResponse struct {
FeedID int64 `json:"feed_id"`
}
type versionResponse struct {
Version string `json:"version"`
Commit string `json:"commit"`
BuildDate string `json:"build_date"`
GoVersion string `json:"go_version"`
Compiler string `json:"compiler"`
Arch string `json:"arch"`
OS string `json:"os"`
}
v2-2.2.6/internal/api/subscription.go 0000664 0000000 0000000 00000004206 14756465373 0017536 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
json_parser "encoding/json"
"net/http"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/fetcher"
"miniflux.app/v2/internal/reader/subscription"
"miniflux.app/v2/internal/validator"
)
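// discoverSubscriptions probes the submitted URL for feeds, honoring the request's fetch options and the user's RSS-Bridge instance when one is configured.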
func (h *handler) discoverSubscriptions(w http.ResponseWriter, r *http.Request) {
var subscriptionDiscoveryRequest model.SubscriptionDiscoveryRequest
if err := json_parser.NewDecoder(r.Body).Decode(&subscriptionDiscoveryRequest); err != nil {
json.BadRequest(w, r, err)
return
}
if validationErr := validator.ValidateSubscriptionDiscovery(&subscriptionDiscoveryRequest); validationErr != nil {
json.BadRequest(w, r, validationErr.Error())
return
}
var rssbridgeURL string
intg, err := h.store.Integration(request.UserID(r))
if err == nil && intg != nil && intg.RSSBridgeEnabled {
rssbridgeURL = intg.RSSBridgeURL
}
requestBuilder := fetcher.NewRequestBuilder()
requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
requestBuilder.WithUserAgent(subscriptionDiscoveryRequest.UserAgent, config.Opts.HTTPClientUserAgent())
requestBuilder.WithCookie(subscriptionDiscoveryRequest.Cookie)
requestBuilder.WithUsernameAndPassword(subscriptionDiscoveryRequest.Username, subscriptionDiscoveryRequest.Password)
requestBuilder.UseProxy(subscriptionDiscoveryRequest.FetchViaProxy)
requestBuilder.IgnoreTLSErrors(subscriptionDiscoveryRequest.AllowSelfSignedCertificates)
requestBuilder.DisableHTTP2(subscriptionDiscoveryRequest.DisableHTTP2)
subscriptions, localizedError := subscription.NewSubscriptionFinder(requestBuilder).FindSubscriptions(
subscriptionDiscoveryRequest.URL,
rssbridgeURL,
)
if localizedError != nil {
json.ServerError(w, r, localizedError.Error())
return
}
if len(subscriptions) == 0 {
json.NotFound(w, r)
return
}
json.OK(w, r, subscriptions)
}
v2-2.2.6/internal/api/user.go 0000664 0000000 0000000 00000013133 14756465373 0015767 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package api // import "miniflux.app/v2/internal/api"
import (
json_parser "encoding/json"
"errors"
"net/http"
"regexp"
"strings"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/validator"
)
func (h *handler) currentUser(w http.ResponseWriter, r *http.Request) {
user, err := h.store.UserByID(request.UserID(r))
if err != nil {
json.ServerError(w, r, err)
return
}
json.OK(w, r, user)
}
func (h *handler) createUser(w http.ResponseWriter, r *http.Request) {
if !request.IsAdminUser(r) {
json.Forbidden(w, r)
return
}
var userCreationRequest model.UserCreationRequest
if err := json_parser.NewDecoder(r.Body).Decode(&userCreationRequest); err != nil {
json.BadRequest(w, r, err)
return
}
if validationErr := validator.ValidateUserCreationWithPassword(h.store, &userCreationRequest); validationErr != nil {
json.BadRequest(w, r, validationErr.Error())
return
}
user, err := h.store.CreateUser(&userCreationRequest)
if err != nil {
json.ServerError(w, r, err)
return
}
json.Created(w, r, user)
}
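// updateUser applies a partial modification to a user account; non-admin users may only modify their own account and cannot grant themselves admin rights.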
func (h *handler) updateUser(w http.ResponseWriter, r *http.Request) {
userID := request.RouteInt64Param(r, "userID")
var userModificationRequest model.UserModificationRequest
if err := json_parser.NewDecoder(r.Body).Decode(&userModificationRequest); err != nil {
json.BadRequest(w, r, err)
return
}
originalUser, err := h.store.UserByID(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
if originalUser == nil {
json.NotFound(w, r)
return
}
if !request.IsAdminUser(r) {
if originalUser.ID != request.UserID(r) {
json.Forbidden(w, r)
return
}
if userModificationRequest.IsAdmin != nil && *userModificationRequest.IsAdmin {
json.BadRequest(w, r, errors.New("only administrators can change permissions of standard users"))
return
}
}
cleanEnd := regexp.MustCompile(`(?m)\r\n\s*$`)
if userModificationRequest.BlockFilterEntryRules != nil {
*userModificationRequest.BlockFilterEntryRules = cleanEnd.ReplaceAllLiteralString(*userModificationRequest.BlockFilterEntryRules, "")
// Clean carriage returns for Windows environments
*userModificationRequest.BlockFilterEntryRules = strings.ReplaceAll(*userModificationRequest.BlockFilterEntryRules, "\r\n", "\n")
}
if userModificationRequest.KeepFilterEntryRules != nil {
*userModificationRequest.KeepFilterEntryRules = cleanEnd.ReplaceAllLiteralString(*userModificationRequest.KeepFilterEntryRules, "")
// Clean carriage returns for Windows environments
*userModificationRequest.KeepFilterEntryRules = strings.ReplaceAll(*userModificationRequest.KeepFilterEntryRules, "\r\n", "\n")
}
if validationErr := validator.ValidateUserModification(h.store, originalUser.ID, &userModificationRequest); validationErr != nil {
json.BadRequest(w, r, validationErr.Error())
return
}
userModificationRequest.Patch(originalUser)
if err = h.store.UpdateUser(originalUser); err != nil {
json.ServerError(w, r, err)
return
}
json.Created(w, r, originalUser)
}
func (h *handler) markUserAsRead(w http.ResponseWriter, r *http.Request) {
userID := request.RouteInt64Param(r, "userID")
if userID != request.UserID(r) {
json.Forbidden(w, r)
return
}
if _, err := h.store.UserByID(userID); err != nil {
json.NotFound(w, r)
return
}
if err := h.store.MarkAllAsRead(userID); err != nil {
json.ServerError(w, r, err)
return
}
json.NoContent(w, r)
}
func (h *handler) getIntegrationsStatus(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
if _, err := h.store.UserByID(userID); err != nil {
json.NotFound(w, r)
return
}
hasIntegrations := h.store.HasSaveEntry(userID)
response := struct {
HasIntegrations bool `json:"has_integrations"`
}{
HasIntegrations: hasIntegrations,
}
json.OK(w, r, response)
}
func (h *handler) users(w http.ResponseWriter, r *http.Request) {
if !request.IsAdminUser(r) {
json.Forbidden(w, r)
return
}
users, err := h.store.Users()
if err != nil {
json.ServerError(w, r, err)
return
}
users.UseTimezone(request.UserTimezone(r))
json.OK(w, r, users)
}
func (h *handler) userByID(w http.ResponseWriter, r *http.Request) {
if !request.IsAdminUser(r) {
json.Forbidden(w, r)
return
}
userID := request.RouteInt64Param(r, "userID")
user, err := h.store.UserByID(userID)
if err != nil {
json.BadRequest(w, r, errors.New("unable to fetch this user from the database"))
return
}
if user == nil {
json.NotFound(w, r)
return
}
user.UseTimezone(request.UserTimezone(r))
json.OK(w, r, user)
}
func (h *handler) userByUsername(w http.ResponseWriter, r *http.Request) {
if !request.IsAdminUser(r) {
json.Forbidden(w, r)
return
}
username := request.RouteStringParam(r, "username")
user, err := h.store.UserByUsername(username)
if err != nil {
json.BadRequest(w, r, errors.New("unable to fetch this user from the database"))
return
}
if user == nil {
json.NotFound(w, r)
return
}
json.OK(w, r, user)
}
func (h *handler) removeUser(w http.ResponseWriter, r *http.Request) {
if !request.IsAdminUser(r) {
json.Forbidden(w, r)
return
}
userID := request.RouteInt64Param(r, "userID")
user, err := h.store.UserByID(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
if user == nil {
json.NotFound(w, r)
return
}
if user.ID == request.UserID(r) {
json.BadRequest(w, r, errors.New("you cannot remove yourself"))
return
}
h.store.RemoveUserAsync(user.ID)
json.NoContent(w, r)
}
v2-2.2.6/internal/cli/ 0000775 0000000 0000000 00000000000 14756465373 0014457 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/cli/ask_credentials.go 0000664 0000000 0000000 00000001374 14756465373 0020146 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"bufio"
"fmt"
"os"
"strings"
"golang.org/x/term"
)
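// askCredentials prompts for a username and password on an interactive terminal; the password is read without echoing it.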
func askCredentials() (string, string) {
fd := int(os.Stdin.Fd())
if !term.IsTerminal(fd) {
printErrorAndExit(fmt.Errorf("this is not an interactive terminal, exiting"))
}
fmt.Print("Enter Username: ")
reader := bufio.NewReader(os.Stdin)
username, _ := reader.ReadString('\n')
fmt.Print("Enter Password: ")
state, _ := term.GetState(fd)
defer term.Restore(fd, state)
bytePassword, _ := term.ReadPassword(fd)
fmt.Printf("\n")
return strings.TrimSpace(username), strings.TrimSpace(string(bytePassword))
}
v2-2.2.6/internal/cli/cleanup_tasks.go 0000664 0000000 0000000 00000003410 14756465373 0017640 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"log/slog"
"time"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/metric"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/storage"
)
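// runCleanupTasks removes expired sessions and archives old read and unread entries, recording durations when the metrics collector is enabled.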
func runCleanupTasks(store *storage.Storage) {
nbSessions := store.CleanOldSessions(config.Opts.CleanupRemoveSessionsDays())
nbUserSessions := store.CleanOldUserSessions(config.Opts.CleanupRemoveSessionsDays())
slog.Info("Sessions cleanup completed",
slog.Int64("application_sessions_removed", nbSessions),
slog.Int64("user_sessions_removed", nbUserSessions),
)
startTime := time.Now()
if rowsAffected, err := store.ArchiveEntries(model.EntryStatusRead, config.Opts.CleanupArchiveReadDays(), config.Opts.CleanupArchiveBatchSize()); err != nil {
slog.Error("Unable to archive read entries", slog.Any("error", err))
} else {
slog.Info("Archiving read entries completed",
slog.Int64("read_entries_archived", rowsAffected),
)
if config.Opts.HasMetricsCollector() {
metric.ArchiveEntriesDuration.WithLabelValues(model.EntryStatusRead).Observe(time.Since(startTime).Seconds())
}
}
startTime = time.Now()
if rowsAffected, err := store.ArchiveEntries(model.EntryStatusUnread, config.Opts.CleanupArchiveUnreadDays(), config.Opts.CleanupArchiveBatchSize()); err != nil {
slog.Error("Unable to archive unread entries", slog.Any("error", err))
} else {
slog.Info("Archiving unread entries completed",
slog.Int64("unread_entries_archived", rowsAffected),
)
if config.Opts.HasMetricsCollector() {
metric.ArchiveEntriesDuration.WithLabelValues(model.EntryStatusUnread).Observe(time.Since(startTime).Seconds())
}
}
}
v2-2.2.6/internal/cli/cli.go 0000664 0000000 0000000 00000016115 14756465373 0015561 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"errors"
"flag"
"fmt"
"io"
"log/slog"
"os"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/database"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/ui/static"
"miniflux.app/v2/internal/version"
)
const (
flagInfoHelp = "Show build information"
flagVersionHelp = "Show application version"
flagMigrateHelp = "Run SQL migrations"
flagFlushSessionsHelp = "Flush all sessions (disconnect users)"
flagCreateAdminHelp = "Create an admin user from an interactive terminal"
flagResetPasswordHelp = "Reset user password"
flagResetFeedErrorsHelp = "Clear all feed errors for all users"
flagDebugModeHelp = "Show debug logs"
flagConfigFileHelp = "Load configuration file"
flagConfigDumpHelp = "Print parsed configuration values"
flagHealthCheckHelp = `Perform a health check on the given endpoint (the value "auto" tries to guess the health check endpoint).`
flagRefreshFeedsHelp = "Refresh a batch of feeds and exit"
flagRunCleanupTasksHelp = "Run cleanup tasks (delete old sessions and archive old entries)"
flagExportUserFeedsHelp = "Export user feeds (provide the username as argument)"
)
// Parse parses command line arguments.
func Parse() {
var (
err error
flagInfo bool
flagVersion bool
flagMigrate bool
flagFlushSessions bool
flagCreateAdmin bool
flagResetPassword bool
flagResetFeedErrors bool
flagDebugMode bool
flagConfigFile string
flagConfigDump bool
flagHealthCheck string
flagRefreshFeeds bool
flagRunCleanupTasks bool
flagExportUserFeeds string
)
flag.BoolVar(&flagInfo, "info", false, flagInfoHelp)
flag.BoolVar(&flagInfo, "i", false, flagInfoHelp)
flag.BoolVar(&flagVersion, "version", false, flagVersionHelp)
flag.BoolVar(&flagVersion, "v", false, flagVersionHelp)
flag.BoolVar(&flagMigrate, "migrate", false, flagMigrateHelp)
flag.BoolVar(&flagFlushSessions, "flush-sessions", false, flagFlushSessionsHelp)
flag.BoolVar(&flagCreateAdmin, "create-admin", false, flagCreateAdminHelp)
flag.BoolVar(&flagResetPassword, "reset-password", false, flagResetPasswordHelp)
flag.BoolVar(&flagResetFeedErrors, "reset-feed-errors", false, flagResetFeedErrorsHelp)
flag.BoolVar(&flagDebugMode, "debug", false, flagDebugModeHelp)
flag.StringVar(&flagConfigFile, "config-file", "", flagConfigFileHelp)
flag.StringVar(&flagConfigFile, "c", "", flagConfigFileHelp)
flag.BoolVar(&flagConfigDump, "config-dump", false, flagConfigDumpHelp)
flag.StringVar(&flagHealthCheck, "healthcheck", "", flagHealthCheckHelp)
flag.BoolVar(&flagRefreshFeeds, "refresh-feeds", false, flagRefreshFeedsHelp)
flag.BoolVar(&flagRunCleanupTasks, "run-cleanup-tasks", false, flagRunCleanupTasksHelp)
flag.StringVar(&flagExportUserFeeds, "export-user-feeds", "", flagExportUserFeedsHelp)
flag.Parse()
cfg := config.NewParser()
if flagConfigFile != "" {
config.Opts, err = cfg.ParseFile(flagConfigFile)
if err != nil {
printErrorAndExit(err)
}
}
config.Opts, err = cfg.ParseEnvironmentVariables()
if err != nil {
printErrorAndExit(err)
}
if oauth2Provider := config.Opts.OAuth2Provider(); oauth2Provider != "" {
if oauth2Provider != "oidc" && oauth2Provider != "google" {
printErrorAndExit(fmt.Errorf(`unsupported OAuth2 provider: %q (Possible values are "google" or "oidc")`, oauth2Provider))
}
}
if config.Opts.DisableLocalAuth() {
switch {
case config.Opts.OAuth2Provider() == "" && config.Opts.AuthProxyHeader() == "":
printErrorAndExit(errors.New("DISABLE_LOCAL_AUTH is enabled but neither OAUTH2_PROVIDER nor AUTH_PROXY_HEADER is not set. Please enable at least one authentication source"))
case config.Opts.OAuth2Provider() != "" && !config.Opts.IsOAuth2UserCreationAllowed():
printErrorAndExit(errors.New("DISABLE_LOCAL_AUTH is enabled and an OAUTH2_PROVIDER is configured, but OAUTH2_USER_CREATION is not enabled"))
case config.Opts.AuthProxyHeader() != "" && !config.Opts.IsAuthProxyUserCreationAllowed():
printErrorAndExit(errors.New("DISABLE_LOCAL_AUTH is enabled and an AUTH_PROXY_HEADER is configured, but AUTH_PROXY_USER_CREATION is not enabled"))
}
}
if flagConfigDump {
fmt.Print(config.Opts)
return
}
if flagDebugMode {
config.Opts.SetLogLevel("debug")
}
logFile := config.Opts.LogFile()
var logFileHandler io.Writer
switch logFile {
case "stdout":
logFileHandler = os.Stdout
case "stderr":
logFileHandler = os.Stderr
default:
logFileHandler, err = os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0600)
if err != nil {
printErrorAndExit(fmt.Errorf("unable to open log file: %v", err))
}
defer logFileHandler.(*os.File).Close()
}
if err := InitializeDefaultLogger(config.Opts.LogLevel(), logFileHandler, config.Opts.LogFormat(), config.Opts.LogDateTime()); err != nil {
printErrorAndExit(err)
}
if flagHealthCheck != "" {
doHealthCheck(flagHealthCheck)
return
}
if flagInfo {
info()
return
}
if flagVersion {
fmt.Println(version.Version)
return
}
if config.Opts.IsDefaultDatabaseURL() {
slog.Info("The default value for DATABASE_URL is used")
}
if err := static.CalculateBinaryFileChecksums(); err != nil {
printErrorAndExit(fmt.Errorf("unable to calculate binary file checksums: %v", err))
}
if err := static.GenerateStylesheetsBundles(); err != nil {
printErrorAndExit(fmt.Errorf("unable to generate stylesheets bundles: %v", err))
}
if err := static.GenerateJavascriptBundles(); err != nil {
printErrorAndExit(fmt.Errorf("unable to generate javascript bundles: %v", err))
}
db, err := database.NewConnectionPool(
config.Opts.DatabaseURL(),
config.Opts.DatabaseMinConns(),
config.Opts.DatabaseMaxConns(),
config.Opts.DatabaseConnectionLifetime(),
)
if err != nil {
printErrorAndExit(fmt.Errorf("unable to connect to database: %v", err))
}
defer db.Close()
store := storage.NewStorage(db)
if err := store.Ping(); err != nil {
printErrorAndExit(err)
}
if flagMigrate {
if err := database.Migrate(db); err != nil {
printErrorAndExit(err)
}
return
}
if flagResetFeedErrors {
store.ResetFeedErrors()
return
}
if flagExportUserFeeds != "" {
exportUserFeeds(store, flagExportUserFeeds)
return
}
if flagFlushSessions {
flushSessions(store)
return
}
if flagCreateAdmin {
createAdminUserFromInteractiveTerminal(store)
return
}
if flagResetPassword {
resetPassword(store)
return
}
// Run migrations and start the daemon.
if config.Opts.RunMigrations() {
if err := database.Migrate(db); err != nil {
printErrorAndExit(err)
}
}
if err := database.IsSchemaUpToDate(db); err != nil {
printErrorAndExit(err)
}
if config.Opts.CreateAdmin() {
createAdminUserFromEnvironmentVariables(store)
}
if flagRefreshFeeds {
refreshFeeds(store)
return
}
if flagRunCleanupTasks {
runCleanupTasks(store)
return
}
startDaemon(store)
}
func printErrorAndExit(err error) {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
v2-2.2.6/internal/cli/create_admin.go 0000664 0000000 0000000 00000002635 14756465373 0017427 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"log/slog"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/validator"
)
func createAdminUserFromEnvironmentVariables(store *storage.Storage) {
createAdminUser(store, config.Opts.AdminUsername(), config.Opts.AdminPassword())
}
func createAdminUserFromInteractiveTerminal(store *storage.Storage) {
username, password := askCredentials()
createAdminUser(store, username, password)
}
func createAdminUser(store *storage.Storage, username, password string) {
userCreationRequest := &model.UserCreationRequest{
Username: username,
Password: password,
IsAdmin: true,
}
if store.UserExists(userCreationRequest.Username) {
slog.Info("Skipping admin user creation because it already exists",
slog.String("username", userCreationRequest.Username),
)
return
}
if validationErr := validator.ValidateUserCreationWithPassword(store, userCreationRequest); validationErr != nil {
printErrorAndExit(validationErr.Error())
}
if user, err := store.CreateUser(userCreationRequest); err != nil {
printErrorAndExit(err)
} else {
slog.Info("Created new admin user",
slog.String("username", user.Username),
slog.Int64("user_id", user.ID),
)
}
}
v2-2.2.6/internal/cli/daemon.go 0000664 0000000 0000000 00000004147 14756465373 0016257 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"context"
"log/slog"
"net/http"
"os"
"os/signal"
"syscall"
"time"
"miniflux.app/v2/internal/config"
httpd "miniflux.app/v2/internal/http/server"
"miniflux.app/v2/internal/metric"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/systemd"
"miniflux.app/v2/internal/worker"
)
func startDaemon(store *storage.Storage) {
slog.Debug("Starting daemon...")
stop := make(chan os.Signal, 1)
signal.Notify(stop, os.Interrupt)
signal.Notify(stop, syscall.SIGTERM)
pool := worker.NewPool(store, config.Opts.WorkerPoolSize())
if config.Opts.HasSchedulerService() && !config.Opts.HasMaintenanceMode() {
runScheduler(store, pool)
}
var httpServer *http.Server
if config.Opts.HasHTTPService() {
httpServer = httpd.StartWebServer(store, pool)
}
if config.Opts.HasMetricsCollector() {
collector := metric.NewCollector(store, config.Opts.MetricsRefreshInterval())
go collector.GatherStorageMetrics()
}
if systemd.HasNotifySocket() {
slog.Debug("Sending readiness notification to Systemd")
if err := systemd.SdNotify(systemd.SdNotifyReady); err != nil {
slog.Error("Unable to send readiness notification to systemd", slog.Any("error", err))
}
if config.Opts.HasWatchdog() && systemd.HasSystemdWatchdog() {
slog.Debug("Activating Systemd watchdog")
go func() {
interval, err := systemd.WatchdogInterval()
if err != nil {
slog.Error("Unable to get watchdog interval from systemd", slog.Any("error", err))
return
}
for {
if err := store.Ping(); err != nil {
slog.Error("Unable to ping database", slog.Any("error", err))
} else {
systemd.SdNotify(systemd.SdNotifyWatchdog)
}
time.Sleep(interval / 3)
}
}()
}
}
<-stop
slog.Debug("Shutting down the process")
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
if httpServer != nil {
httpServer.Shutdown(ctx)
}
slog.Debug("Process gracefully stopped")
}
v2-2.2.6/internal/cli/export_feeds.go 0000664 0000000 0000000 00000001357 14756465373 0017503 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"fmt"
"miniflux.app/v2/internal/reader/opml"
"miniflux.app/v2/internal/storage"
)
func exportUserFeeds(store *storage.Storage, username string) {
user, err := store.UserByUsername(username)
if err != nil {
printErrorAndExit(fmt.Errorf("unable to find user: %w", err))
}
if user == nil {
printErrorAndExit(fmt.Errorf("user %q not found", username))
}
opmlHandler := opml.NewHandler(store)
opmlExport, err := opmlHandler.Export(user.ID)
if err != nil {
printErrorAndExit(fmt.Errorf("unable to export feeds: %w", err))
}
fmt.Println(opmlExport)
}
v2-2.2.6/internal/cli/flush_sessions.go 0000664 0000000 0000000 00000000634 14756465373 0020060 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"fmt"
"miniflux.app/v2/internal/storage"
)
func flushSessions(store *storage.Storage) {
fmt.Println("Flushing all sessions (disconnect users)")
if err := store.FlushAllSessions(); err != nil {
printErrorAndExit(err)
}
}
v2-2.2.6/internal/cli/health_check.go 0000664 0000000 0000000 00000001627 14756465373 0017416 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"fmt"
"log/slog"
"net/http"
"time"
"miniflux.app/v2/internal/config"
)
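// doHealthCheck sends an HTTP GET request to the health check endpoint and exits with an error unless it returns HTTP 200.
// The special value "auto" targets the local listen address.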
func doHealthCheck(healthCheckEndpoint string) {
if healthCheckEndpoint == "auto" {
healthCheckEndpoint = "http://" + config.Opts.ListenAddr() + config.Opts.BasePath() + "/healthcheck"
}
slog.Debug("Executing health check request", slog.String("endpoint", healthCheckEndpoint))
client := &http.Client{Timeout: 3 * time.Second}
resp, err := client.Get(healthCheckEndpoint)
if err != nil {
printErrorAndExit(fmt.Errorf(`health check failure: %v`, err))
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
printErrorAndExit(fmt.Errorf(`health check failed with status code %d`, resp.StatusCode))
}
slog.Debug(`Health check is passing`)
}
v2-2.2.6/internal/cli/info.go 0000664 0000000 0000000 00000001045 14756465373 0015741 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"fmt"
"runtime"
"miniflux.app/v2/internal/version"
)
func info() {
fmt.Println("Version:", version.Version)
fmt.Println("Commit:", version.Commit)
fmt.Println("Build Date:", version.BuildDate)
fmt.Println("Go Version:", runtime.Version())
fmt.Println("Compiler:", runtime.Compiler)
fmt.Println("Arch:", runtime.GOARCH)
fmt.Println("OS:", runtime.GOOS)
}
v2-2.2.6/internal/cli/logger.go 0000664 0000000 0000000 00000002072 14756465373 0016266 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"io"
"log/slog"
)
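// InitializeDefaultLogger configures the process-wide slog logger with the given level, output, format (text or JSON) and optional timestamps.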
func InitializeDefaultLogger(logLevel string, logFile io.Writer, logFormat string, logTime bool) error {
var programLogLevel = new(slog.LevelVar)
switch logLevel {
case "debug":
programLogLevel.Set(slog.LevelDebug)
case "info":
programLogLevel.Set(slog.LevelInfo)
case "warning":
programLogLevel.Set(slog.LevelWarn)
case "error":
programLogLevel.Set(slog.LevelError)
}
logHandlerOptions := &slog.HandlerOptions{Level: programLogLevel}
if !logTime {
logHandlerOptions.ReplaceAttr = func(groups []string, a slog.Attr) slog.Attr {
if a.Key == slog.TimeKey {
return slog.Attr{}
}
return a
}
}
var logger *slog.Logger
switch logFormat {
case "json":
logger = slog.New(slog.NewJSONHandler(logFile, logHandlerOptions))
default:
logger = slog.New(slog.NewTextHandler(logFile, logHandlerOptions))
}
slog.SetDefault(logger)
return nil
}
v2-2.2.6/internal/cli/refresh_feeds.go 0000664 0000000 0000000 00000003765 14756465373 0017625 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"log/slog"
"sync"
"time"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/model"
feedHandler "miniflux.app/v2/internal/reader/handler"
"miniflux.app/v2/internal/storage"
)
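// refreshFeeds fetches a batch of feeds due for refresh and processes them with a pool of workers before returning.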
func refreshFeeds(store *storage.Storage) {
var wg sync.WaitGroup
startTime := time.Now()
// Generate a batch of feeds for any user that has feeds to refresh.
batchBuilder := store.NewBatchBuilder()
batchBuilder.WithBatchSize(config.Opts.BatchSize())
batchBuilder.WithErrorLimit(config.Opts.PollingParsingErrorLimit())
batchBuilder.WithoutDisabledFeeds()
batchBuilder.WithNextCheckExpired()
jobs, err := batchBuilder.FetchJobs()
if err != nil {
slog.Error("Unable to fetch jobs from database", slog.Any("error", err))
return
}
nbJobs := len(jobs)
slog.Info("Created a batch of feeds",
slog.Int("nb_jobs", nbJobs),
slog.Int("batch_size", config.Opts.BatchSize()),
)
var jobQueue = make(chan model.Job, nbJobs)
slog.Info("Starting a pool of workers",
slog.Int("nb_workers", config.Opts.WorkerPoolSize()),
)
for i := range config.Opts.WorkerPoolSize() {
wg.Add(1)
go func(workerID int) {
defer wg.Done()
for job := range jobQueue {
slog.Info("Refreshing feed",
slog.Int64("feed_id", job.FeedID),
slog.Int64("user_id", job.UserID),
slog.Int("worker_id", workerID),
)
if localizedError := feedHandler.RefreshFeed(store, job.UserID, job.FeedID, false); localizedError != nil {
slog.Warn("Unable to refresh feed",
slog.Int64("feed_id", job.FeedID),
slog.Int64("user_id", job.UserID),
slog.Any("error", localizedError.Error()),
)
}
}
}(i)
}
for _, job := range jobs {
jobQueue <- job
}
close(jobQueue)
wg.Wait()
slog.Info("Refreshed a batch of feeds",
slog.Int("nb_feeds", nbJobs),
slog.String("duration", time.Since(startTime).String()),
)
}
v2-2.2.6/internal/cli/reset_password.go 0000664 0000000 0000000 00000001657 14756465373 0020063 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"fmt"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/validator"
)
func resetPassword(store *storage.Storage) {
username, password := askCredentials()
user, err := store.UserByUsername(username)
if err != nil {
printErrorAndExit(err)
}
if user == nil {
printErrorAndExit(fmt.Errorf("user not found"))
}
userModificationRequest := &model.UserModificationRequest{
Password: &password,
}
if validationErr := validator.ValidateUserModification(store, user.ID, userModificationRequest); validationErr != nil {
printErrorAndExit(validationErr.Error())
}
user.Password = password
if err := store.UpdateUser(user); err != nil {
printErrorAndExit(err)
}
fmt.Println("Password changed!")
}
v2-2.2.6/internal/cli/scheduler.go 0000664 0000000 0000000 00000002743 14756465373 0016772 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cli // import "miniflux.app/v2/internal/cli"
import (
"log/slog"
"time"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/worker"
)
func runScheduler(store *storage.Storage, pool *worker.Pool) {
slog.Debug(`Starting background scheduler...`)
go feedScheduler(
store,
pool,
config.Opts.PollingFrequency(),
config.Opts.BatchSize(),
config.Opts.PollingParsingErrorLimit(),
)
go cleanupScheduler(
store,
config.Opts.CleanupFrequencyHours(),
)
}
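// feedScheduler periodically builds a batch of feeds due for refresh and pushes the resulting jobs to the worker pool.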
func feedScheduler(store *storage.Storage, pool *worker.Pool, frequency, batchSize, errorLimit int) {
for range time.Tick(time.Duration(frequency) * time.Minute) {
// Generate a batch of feeds for any user that has feeds to refresh.
batchBuilder := store.NewBatchBuilder()
batchBuilder.WithBatchSize(batchSize)
batchBuilder.WithErrorLimit(errorLimit)
batchBuilder.WithoutDisabledFeeds()
batchBuilder.WithNextCheckExpired()
if jobs, err := batchBuilder.FetchJobs(); err != nil {
slog.Error("Unable to fetch jobs from database", slog.Any("error", err))
} else if len(jobs) > 0 {
slog.Info("Created a batch of feeds",
slog.Int("nb_jobs", len(jobs)),
)
pool.Push(jobs)
}
}
}
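// cleanupScheduler runs the cleanup tasks at the configured frequency (in hours).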
func cleanupScheduler(store *storage.Storage, frequency int) {
for range time.Tick(time.Duration(frequency) * time.Hour) {
runCleanupTasks(store)
}
}
v2-2.2.6/internal/config/ 0000775 0000000 0000000 00000000000 14756465373 0015155 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/config/config.go 0000664 0000000 0000000 00000000362 14756465373 0016752 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package config // import "miniflux.app/v2/internal/config"
// Opts holds parsed configuration options.
var Opts *Options
v2-2.2.6/internal/config/config_test.go 0000664 0000000 0000000 00000145514 14756465373 0020022 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package config // import "miniflux.app/v2/internal/config"
import (
"bytes"
"os"
"testing"
)
func TestLogFileDefaultValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogFile() != defaultLogFile {
t.Fatalf(`Unexpected log file value, got %q`, opts.LogFile())
}
}
func TestLogFileWithCustomFilename(t *testing.T) {
os.Clearenv()
os.Setenv("LOG_FILE", "foobar.log")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogFile() != "foobar.log" {
t.Fatalf(`Unexpected log file value, got %q`, opts.LogFile())
}
}
func TestLogFileWithEmptyValue(t *testing.T) {
os.Clearenv()
os.Setenv("LOG_FILE", "")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogFile() != defaultLogFile {
t.Fatalf(`Unexpected log file value, got %q`, opts.LogFile())
}
}
func TestLogLevelDefaultValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogLevel() != defaultLogLevel {
t.Fatalf(`Unexpected log level value, got %q`, opts.LogLevel())
}
}
func TestLogLevelWithCustomValue(t *testing.T) {
os.Clearenv()
os.Setenv("LOG_LEVEL", "warning")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogLevel() != "warning" {
t.Fatalf(`Unexpected log level value, got %q`, opts.LogLevel())
}
}
func TestLogLevelWithInvalidValue(t *testing.T) {
os.Clearenv()
os.Setenv("LOG_LEVEL", "invalid")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogLevel() != defaultLogLevel {
t.Fatalf(`Unexpected log level value, got %q`, opts.LogLevel())
}
}
func TestLogDateTimeDefaultValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogDateTime() != defaultLogDateTime {
t.Fatalf(`Unexpected log date time value, got %v`, opts.LogDateTime())
}
}
func TestLogDateTimeWithCustomValue(t *testing.T) {
os.Clearenv()
os.Setenv("LOG_DATETIME", "false")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogDateTime() != false {
t.Fatalf(`Unexpected log date time value, got %v`, opts.LogDateTime())
}
}
func TestLogDateTimeWithInvalidValue(t *testing.T) {
os.Clearenv()
os.Setenv("LOG_DATETIME", "invalid")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogDateTime() != defaultLogDateTime {
t.Fatalf(`Unexpected log date time value, got %v`, opts.LogDateTime())
}
}
func TestLogFormatDefaultValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogFormat() != defaultLogFormat {
t.Fatalf(`Unexpected log format value, got %q`, opts.LogFormat())
}
}
func TestLogFormatWithCustomValue(t *testing.T) {
os.Clearenv()
os.Setenv("LOG_FORMAT", "json")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogFormat() != "json" {
t.Fatalf(`Unexpected log format value, got %q`, opts.LogFormat())
}
}
func TestLogFormatWithInvalidValue(t *testing.T) {
os.Clearenv()
os.Setenv("LOG_FORMAT", "invalid")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogFormat() != defaultLogFormat {
t.Fatalf(`Unexpected log format value, got %q`, opts.LogFormat())
}
}
func TestDebugModeOn(t *testing.T) {
os.Clearenv()
os.Setenv("DEBUG", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogLevel() != "debug" {
t.Fatalf(`Unexpected debug mode value, got %q`, opts.LogLevel())
}
}
func TestDebugModeOff(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.LogLevel() != "info" {
t.Fatalf(`Unexpected debug mode value, got %q`, opts.LogLevel())
}
}
func TestCustomBaseURL(t *testing.T) {
os.Clearenv()
os.Setenv("BASE_URL", "http://example.org")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.BaseURL() != "http://example.org" {
t.Fatalf(`Unexpected base URL, got "%s"`, opts.BaseURL())
}
if opts.RootURL() != "http://example.org" {
t.Fatalf(`Unexpected root URL, got "%s"`, opts.RootURL())
}
if opts.BasePath() != "" {
t.Fatalf(`Unexpected base path, got "%s"`, opts.BasePath())
}
}
func TestCustomBaseURLWithTrailingSlash(t *testing.T) {
os.Clearenv()
os.Setenv("BASE_URL", "http://example.org/folder/")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.BaseURL() != "http://example.org/folder" {
t.Fatalf(`Unexpected base URL, got "%s"`, opts.BaseURL())
}
if opts.RootURL() != "http://example.org" {
t.Fatalf(`Unexpected root URL, got "%s"`, opts.RootURL())
}
if opts.BasePath() != "/folder" {
t.Fatalf(`Unexpected base path, got "%s"`, opts.BasePath())
}
}
func TestCustomBaseURLWithCustomPort(t *testing.T) {
os.Clearenv()
os.Setenv("BASE_URL", "http://example.org:88/folder/")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.BaseURL() != "http://example.org:88/folder" {
t.Fatalf(`Unexpected base URL, got "%s"`, opts.BaseURL())
}
if opts.RootURL() != "http://example.org:88" {
t.Fatalf(`Unexpected root URL, got "%s"`, opts.RootURL())
}
if opts.BasePath() != "/folder" {
t.Fatalf(`Unexpected base path, got "%s"`, opts.BasePath())
}
}
func TestBaseURLWithoutScheme(t *testing.T) {
os.Clearenv()
os.Setenv("BASE_URL", "example.org/folder/")
_, err := NewParser().ParseEnvironmentVariables()
if err == nil {
t.Fatalf(`Parsing must fail`)
}
}
func TestBaseURLWithInvalidScheme(t *testing.T) {
os.Clearenv()
os.Setenv("BASE_URL", "ftp://example.org/folder/")
_, err := NewParser().ParseEnvironmentVariables()
if err == nil {
t.Fatalf(`Parsing must fail`)
}
}
func TestInvalidBaseURL(t *testing.T) {
os.Clearenv()
os.Setenv("BASE_URL", "http://example|org")
_, err := NewParser().ParseEnvironmentVariables()
if err == nil {
t.Fatalf(`Parsing must fail`)
}
}
func TestDefaultBaseURL(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.BaseURL() != defaultBaseURL {
t.Fatalf(`Unexpected base URL, got "%s"`, opts.BaseURL())
}
if opts.RootURL() != defaultBaseURL {
t.Fatalf(`Unexpected root URL, got "%s"`, opts.RootURL())
}
if opts.BasePath() != "" {
t.Fatalf(`Unexpected base path, got "%s"`, opts.BasePath())
}
}
func TestDatabaseURL(t *testing.T) {
os.Clearenv()
os.Setenv("DATABASE_URL", "foobar")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "foobar"
result := opts.DatabaseURL()
if result != expected {
t.Errorf(`Unexpected DATABASE_URL value, got %q instead of %q`, result, expected)
}
if opts.IsDefaultDatabaseURL() {
t.Errorf(`This is not the default database URL and it should return false`)
}
}
func TestDefaultDatabaseURLValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultDatabaseURL
result := opts.DatabaseURL()
if result != expected {
t.Errorf(`Unexpected DATABASE_URL value, got %q instead of %q`, result, expected)
}
if !opts.IsDefaultDatabaseURL() {
t.Errorf(`This is the default database URL and it should return true`)
}
}
func TestDefaultDatabaseMaxConnsValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultDatabaseMaxConns
result := opts.DatabaseMaxConns()
if result != expected {
t.Fatalf(`Unexpected DATABASE_MAX_CONNS value, got %v instead of %v`, result, expected)
}
}
func TestDatabaseMaxConns(t *testing.T) {
os.Clearenv()
os.Setenv("DATABASE_MAX_CONNS", "42")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 42
result := opts.DatabaseMaxConns()
if result != expected {
t.Fatalf(`Unexpected DATABASE_MAX_CONNS value, got %v instead of %v`, result, expected)
}
}
func TestDefaultDatabaseMinConnsValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultDatabaseMinConns
result := opts.DatabaseMinConns()
if result != expected {
t.Fatalf(`Unexpected DATABASE_MIN_CONNS value, got %v instead of %v`, result, expected)
}
}
func TestDatabaseMinConns(t *testing.T) {
os.Clearenv()
os.Setenv("DATABASE_MIN_CONNS", "42")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 42
result := opts.DatabaseMinConns()
if result != expected {
t.Fatalf(`Unexpected DATABASE_MIN_CONNS value, got %v instead of %v`, result, expected)
}
}
func TestListenAddr(t *testing.T) {
os.Clearenv()
os.Setenv("LISTEN_ADDR", "foobar")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "foobar"
result := opts.ListenAddr()
if result != expected {
t.Fatalf(`Unexpected LISTEN_ADDR value, got %q instead of %q`, result, expected)
}
}
func TestListenAddrWithPortDefined(t *testing.T) {
os.Clearenv()
os.Setenv("PORT", "3000")
os.Setenv("LISTEN_ADDR", "foobar")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := ":3000"
result := opts.ListenAddr()
if result != expected {
t.Fatalf(`Unexpected LISTEN_ADDR value, got %q instead of %q`, result, expected)
}
}
func TestDefaultListenAddrValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultListenAddr
result := opts.ListenAddr()
if result != expected {
t.Fatalf(`Unexpected LISTEN_ADDR value, got %q instead of %q`, result, expected)
}
}
func TestCertFile(t *testing.T) {
os.Clearenv()
os.Setenv("CERT_FILE", "foobar")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "foobar"
result := opts.CertFile()
if result != expected {
t.Fatalf(`Unexpected CERT_FILE value, got %q instead of %q`, result, expected)
}
}
func TestDefaultCertFileValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultCertFile
result := opts.CertFile()
if result != expected {
t.Fatalf(`Unexpected CERT_FILE value, got %q instead of %q`, result, expected)
}
}
func TestKeyFile(t *testing.T) {
os.Clearenv()
os.Setenv("KEY_FILE", "foobar")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "foobar"
result := opts.CertKeyFile()
if result != expected {
t.Fatalf(`Unexpected KEY_FILE value, got %q instead of %q`, result, expected)
}
}
func TestDefaultKeyFileValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultKeyFile
result := opts.CertKeyFile()
if result != expected {
t.Fatalf(`Unexpected KEY_FILE value, got %q instead of %q`, result, expected)
}
}
func TestCertDomain(t *testing.T) {
os.Clearenv()
os.Setenv("CERT_DOMAIN", "example.org")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "example.org"
result := opts.CertDomain()
if result != expected {
t.Fatalf(`Unexpected CERT_DOMAIN value, got %q instead of %q`, result, expected)
}
}
func TestDefaultCertDomainValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultCertDomain
result := opts.CertDomain()
if result != expected {
t.Fatalf(`Unexpected CERT_DOMAIN value, got %q instead of %q`, result, expected)
}
}
func TestDefaultCleanupFrequencyHoursValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultCleanupFrequencyHours
result := opts.CleanupFrequencyHours()
if result != expected {
t.Fatalf(`Unexpected CLEANUP_FREQUENCY_HOURS value, got %v instead of %v`, result, expected)
}
}
func TestCleanupFrequencyHours(t *testing.T) {
os.Clearenv()
os.Setenv("CLEANUP_FREQUENCY_HOURS", "42")
os.Setenv("CLEANUP_FREQUENCY", "19")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 42
result := opts.CleanupFrequencyHours()
if result != expected {
t.Fatalf(`Unexpected CLEANUP_FREQUENCY_HOURS value, got %v instead of %v`, result, expected)
}
}
func TestDefaultCleanupArchiveReadDaysValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 60
result := opts.CleanupArchiveReadDays()
if result != expected {
t.Fatalf(`Unexpected CLEANUP_ARCHIVE_READ_DAYS value, got %v instead of %v`, result, expected)
}
}
func TestCleanupArchiveReadDays(t *testing.T) {
os.Clearenv()
os.Setenv("CLEANUP_ARCHIVE_READ_DAYS", "7")
os.Setenv("ARCHIVE_READ_DAYS", "19")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 7
result := opts.CleanupArchiveReadDays()
if result != expected {
t.Fatalf(`Unexpected CLEANUP_ARCHIVE_READ_DAYS value, got %v instead of %v`, result, expected)
}
}
func TestDefaultCleanupRemoveSessionsDaysValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 30
result := opts.CleanupRemoveSessionsDays()
if result != expected {
t.Fatalf(`Unexpected CLEANUP_REMOVE_SESSIONS_DAYS value, got %v instead of %v`, result, expected)
}
}
func TestCleanupRemoveSessionsDays(t *testing.T) {
os.Clearenv()
os.Setenv("CLEANUP_REMOVE_SESSIONS_DAYS", "7")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 7
result := opts.CleanupRemoveSessionsDays()
if result != expected {
t.Fatalf(`Unexpected CLEANUP_REMOVE_SESSIONS_DAYS value, got %v instead of %v`, result, expected)
}
}
func TestDefaultWorkerPoolSizeValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultWorkerPoolSize
result := opts.WorkerPoolSize()
if result != expected {
t.Fatalf(`Unexpected WORKER_POOL_SIZE value, got %v instead of %v`, result, expected)
}
}
func TestWorkerPoolSize(t *testing.T) {
os.Clearenv()
os.Setenv("WORKER_POOL_SIZE", "42")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 42
result := opts.WorkerPoolSize()
if result != expected {
t.Fatalf(`Unexpected WORKER_POOL_SIZE value, got %v instead of %v`, result, expected)
}
}
func TestDefaultPollingFrequencyValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultPollingFrequency
result := opts.PollingFrequency()
if result != expected {
t.Fatalf(`Unexpected POLLING_FREQUENCY value, got %v instead of %v`, result, expected)
}
}
func TestPollingFrequency(t *testing.T) {
os.Clearenv()
os.Setenv("POLLING_FREQUENCY", "42")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 42
result := opts.PollingFrequency()
if result != expected {
t.Fatalf(`Unexpected POLLING_FREQUENCY value, got %v instead of %v`, result, expected)
}
}
func TestDefaultForceRefreshInterval(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultForceRefreshInterval
result := opts.ForceRefreshInterval()
if result != expected {
t.Fatalf(`Unexpected FORCE_REFRESH_INTERVAL value, got %v instead of %v`, result, expected)
}
}
func TestForceRefreshInterval(t *testing.T) {
os.Clearenv()
os.Setenv("FORCE_REFRESH_INTERVAL", "42")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 42
result := opts.ForceRefreshInterval()
if result != expected {
t.Fatalf(`Unexpected FORCE_REFRESH_INTERVAL value, got %v instead of %v`, result, expected)
}
}
func TestDefaultBatchSizeValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultBatchSize
result := opts.BatchSize()
if result != expected {
t.Fatalf(`Unexpected BATCH_SIZE value, got %v instead of %v`, result, expected)
}
}
func TestBatchSize(t *testing.T) {
os.Clearenv()
os.Setenv("BATCH_SIZE", "42")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 42
result := opts.BatchSize()
if result != expected {
t.Fatalf(`Unexpected BATCH_SIZE value, got %v instead of %v`, result, expected)
}
}
func TestDefaultPollingSchedulerValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultPollingScheduler
result := opts.PollingScheduler()
if result != expected {
t.Fatalf(`Unexpected POLLING_SCHEDULER value, got %v instead of %v`, result, expected)
}
}
func TestPollingScheduler(t *testing.T) {
os.Clearenv()
os.Setenv("POLLING_SCHEDULER", "entry_count_based")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "entry_count_based"
result := opts.PollingScheduler()
if result != expected {
t.Fatalf(`Unexpected POLLING_SCHEDULER value, got %v instead of %v`, result, expected)
}
}
func TestDefaultSchedulerEntryFrequencyMaxIntervalValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultSchedulerEntryFrequencyMaxInterval
result := opts.SchedulerEntryFrequencyMaxInterval()
if result != expected {
t.Fatalf(`Unexpected SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL value, got %v instead of %v`, result, expected)
}
}
func TestSchedulerEntryFrequencyMaxInterval(t *testing.T) {
os.Clearenv()
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL", "30")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 30
result := opts.SchedulerEntryFrequencyMaxInterval()
if result != expected {
t.Fatalf(`Unexpected SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL value, got %v instead of %v`, result, expected)
}
}
func TestDefaultSchedulerEntryFrequencyMinIntervalValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultSchedulerEntryFrequencyMinInterval
result := opts.SchedulerEntryFrequencyMinInterval()
if result != expected {
t.Fatalf(`Unexpected SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL value, got %v instead of %v`, result, expected)
}
}
func TestSchedulerEntryFrequencyMinInterval(t *testing.T) {
os.Clearenv()
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL", "30")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 30
result := opts.SchedulerEntryFrequencyMinInterval()
if result != expected {
t.Fatalf(`Unexpected SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL value, got %v instead of %v`, result, expected)
}
}
func TestDefaultSchedulerEntryFrequencyFactorValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultSchedulerEntryFrequencyFactor
result := opts.SchedulerEntryFrequencyFactor()
if result != expected {
t.Fatalf(`Unexpected SCHEDULER_ENTRY_FREQUENCY_FACTOR value, got %v instead of %v`, result, expected)
}
}
func TestSchedulerEntryFrequencyFactor(t *testing.T) {
os.Clearenv()
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_FACTOR", "2")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 2
result := opts.SchedulerEntryFrequencyFactor()
if result != expected {
t.Fatalf(`Unexpected SCHEDULER_ENTRY_FREQUENCY_FACTOR value, got %v instead of %v`, result, expected)
}
}
func TestDefaultSchedulerRoundRobinValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultSchedulerRoundRobinMinInterval
result := opts.SchedulerRoundRobinMinInterval()
if result != expected {
t.Fatalf(`Unexpected SCHEDULER_ROUND_ROBIN_MIN_INTERVAL value, got %v instead of %v`, result, expected)
}
}
func TestSchedulerRoundRobin(t *testing.T) {
os.Clearenv()
os.Setenv("SCHEDULER_ROUND_ROBIN_MIN_INTERVAL", "15")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 15
result := opts.SchedulerRoundRobinMinInterval()
if result != expected {
t.Fatalf(`Unexpected SCHEDULER_ROUND_ROBIN_MIN_INTERVAL value, got %v instead of %v`, result, expected)
}
}
func TestPollingParsingErrorLimit(t *testing.T) {
os.Clearenv()
os.Setenv("POLLING_PARSING_ERROR_LIMIT", "100")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 100
result := opts.PollingParsingErrorLimit()
if result != expected {
t.Fatalf(`Unexpected POLLING_PARSING_ERROR_LIMIT value, got %v instead of %v`, result, expected)
}
}
func TestOAuth2UserCreationWhenUnset(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := false
result := opts.IsOAuth2UserCreationAllowed()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_USER_CREATION value, got %v instead of %v`, result, expected)
}
}
func TestOAuth2UserCreationAdmin(t *testing.T) {
os.Clearenv()
os.Setenv("OAUTH2_USER_CREATION", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.IsOAuth2UserCreationAllowed()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_USER_CREATION value, got %v instead of %v`, result, expected)
}
}
func TestOAuth2ClientID(t *testing.T) {
os.Clearenv()
os.Setenv("OAUTH2_CLIENT_ID", "foobar")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "foobar"
result := opts.OAuth2ClientID()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_CLIENT_ID value, got %q instead of %q`, result, expected)
}
}
func TestDefaultOAuth2ClientIDValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultOAuth2ClientID
result := opts.OAuth2ClientID()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_CLIENT_ID value, got %q instead of %q`, result, expected)
}
}
func TestOAuth2ClientSecret(t *testing.T) {
os.Clearenv()
os.Setenv("OAUTH2_CLIENT_SECRET", "secret")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "secret"
result := opts.OAuth2ClientSecret()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_CLIENT_SECRET value, got %q instead of %q`, result, expected)
}
}
func TestDefaultOAuth2ClientSecretValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultOAuth2ClientSecret
result := opts.OAuth2ClientSecret()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_CLIENT_SECRET value, got %q instead of %q`, result, expected)
}
}
func TestOAuth2RedirectURL(t *testing.T) {
os.Clearenv()
os.Setenv("OAUTH2_REDIRECT_URL", "http://example.org")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "http://example.org"
result := opts.OAuth2RedirectURL()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_REDIRECT_URL value, got %q instead of %q`, result, expected)
}
}
func TestDefaultOAuth2RedirectURLValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultOAuth2RedirectURL
result := opts.OAuth2RedirectURL()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_REDIRECT_URL value, got %q instead of %q`, result, expected)
}
}
func TestOAuth2OIDCDiscoveryEndpoint(t *testing.T) {
os.Clearenv()
os.Setenv("OAUTH2_OIDC_DISCOVERY_ENDPOINT", "http://example.org")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "http://example.org"
result := opts.OIDCDiscoveryEndpoint()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_OIDC_DISCOVERY_ENDPOINT value, got %q instead of %q`, result, expected)
}
}
func TestDefaultOIDCDiscoveryEndpointValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultOAuth2OidcDiscoveryEndpoint
result := opts.OIDCDiscoveryEndpoint()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_OIDC_DISCOVERY_ENDPOINT value, got %q instead of %q`, result, expected)
}
}
func TestOAuth2Provider(t *testing.T) {
os.Clearenv()
os.Setenv("OAUTH2_PROVIDER", "google")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "google"
result := opts.OAuth2Provider()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_PROVIDER value, got %q instead of %q`, result, expected)
}
}
func TestDefaultOAuth2ProviderValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultOAuth2Provider
result := opts.OAuth2Provider()
if result != expected {
t.Fatalf(`Unexpected OAUTH2_PROVIDER value, got %q instead of %q`, result, expected)
}
}
func TestHSTSWhenUnset(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.HasHSTS()
if result != expected {
t.Fatalf(`Unexpected DISABLE_HSTS value, got %v instead of %v`, result, expected)
}
}
func TestHSTS(t *testing.T) {
os.Clearenv()
os.Setenv("DISABLE_HSTS", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := false
result := opts.HasHSTS()
if result != expected {
t.Fatalf(`Unexpected DISABLE_HSTS value, got %v instead of %v`, result, expected)
}
}
func TestDisableHTTPServiceWhenUnset(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.HasHTTPService()
if result != expected {
t.Fatalf(`Unexpected DISABLE_HTTP_SERVICE value, got %v instead of %v`, result, expected)
}
}
func TestDisableHTTPService(t *testing.T) {
os.Clearenv()
os.Setenv("DISABLE_HTTP_SERVICE", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := false
result := opts.HasHTTPService()
if result != expected {
t.Fatalf(`Unexpected DISABLE_HTTP_SERVICE value, got %v instead of %v`, result, expected)
}
}
func TestDisableSchedulerServiceWhenUnset(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.HasSchedulerService()
if result != expected {
t.Fatalf(`Unexpected DISABLE_SCHEDULER_SERVICE value, got %v instead of %v`, result, expected)
}
}
func TestDisableSchedulerService(t *testing.T) {
os.Clearenv()
os.Setenv("DISABLE_SCHEDULER_SERVICE", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := false
result := opts.HasSchedulerService()
if result != expected {
t.Fatalf(`Unexpected DISABLE_SCHEDULER_SERVICE value, got %v instead of %v`, result, expected)
}
}
func TestRunMigrationsWhenUnset(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := false
result := opts.RunMigrations()
if result != expected {
t.Fatalf(`Unexpected RUN_MIGRATIONS value, got %v instead of %v`, result, expected)
}
}
func TestRunMigrations(t *testing.T) {
os.Clearenv()
os.Setenv("RUN_MIGRATIONS", "yes")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.RunMigrations()
if result != expected {
t.Fatalf(`Unexpected RUN_MIGRATIONS value, got %v instead of %v`, result, expected)
}
}
func TestCreateAdminWhenUnset(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := false
result := opts.CreateAdmin()
if result != expected {
t.Fatalf(`Unexpected CREATE_ADMIN value, got %v instead of %v`, result, expected)
}
}
func TestCreateAdmin(t *testing.T) {
os.Clearenv()
os.Setenv("CREATE_ADMIN", "true")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.CreateAdmin()
if result != expected {
t.Fatalf(`Unexpected CREATE_ADMIN value, got %v instead of %v`, result, expected)
}
}
func TestPocketConsumerKeyFromEnvVariable(t *testing.T) {
os.Clearenv()
os.Setenv("POCKET_CONSUMER_KEY", "something")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "something"
result := opts.PocketConsumerKey("default")
if result != expected {
t.Fatalf(`Unexpected POCKET_CONSUMER_KEY value, got %q instead of %q`, result, expected)
}
}
func TestPocketConsumerKeyFromUserPrefs(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "default"
result := opts.PocketConsumerKey("default")
if result != expected {
t.Fatalf(`Unexpected POCKET_CONSUMER_KEY value, got %q instead of %q`, result, expected)
}
}
func TestMediaProxyMode(t *testing.T) {
os.Clearenv()
os.Setenv("MEDIA_PROXY_MODE", "all")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "all"
result := opts.MediaProxyMode()
if result != expected {
t.Fatalf(`Unexpected MEDIA_PROXY_MODE value, got %q instead of %q`, result, expected)
}
}
func TestDefaultMediaProxyModeValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultMediaProxyMode
result := opts.MediaProxyMode()
if result != expected {
t.Fatalf(`Unexpected MEDIA_PROXY_MODE value, got %q instead of %q`, result, expected)
}
}
func TestMediaProxyResourceTypes(t *testing.T) {
os.Clearenv()
os.Setenv("MEDIA_PROXY_RESOURCE_TYPES", "image,audio")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := []string{"audio", "image"}
if len(expected) != len(opts.MediaProxyResourceTypes()) {
t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
resultMap := make(map[string]bool)
for _, mediaType := range opts.MediaProxyResourceTypes() {
resultMap[mediaType] = true
}
for _, mediaType := range expected {
if !resultMap[mediaType] {
t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
}
}
func TestMediaProxyResourceTypesWithDuplicatedValues(t *testing.T) {
os.Clearenv()
os.Setenv("MEDIA_PROXY_RESOURCE_TYPES", "image,audio, image")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := []string{"audio", "image"}
if len(expected) != len(opts.MediaProxyResourceTypes()) {
t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
resultMap := make(map[string]bool)
for _, mediaType := range opts.MediaProxyResourceTypes() {
resultMap[mediaType] = true
}
for _, mediaType := range expected {
if !resultMap[mediaType] {
t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
}
}
func TestDefaultMediaProxyResourceTypes(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := []string{"image"}
if len(expected) != len(opts.MediaProxyResourceTypes()) {
t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
resultMap := make(map[string]bool)
for _, mediaType := range opts.MediaProxyResourceTypes() {
resultMap[mediaType] = true
}
for _, mediaType := range expected {
if !resultMap[mediaType] {
t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
}
}
func TestMediaProxyHTTPClientTimeout(t *testing.T) {
os.Clearenv()
os.Setenv("MEDIA_PROXY_HTTP_CLIENT_TIMEOUT", "24")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 24
result := opts.MediaProxyHTTPClientTimeout()
if result != expected {
t.Fatalf(`Unexpected MEDIA_PROXY_HTTP_CLIENT_TIMEOUT value, got %d instead of %d`, result, expected)
}
}
func TestDefaultMediaProxyHTTPClientTimeoutValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultMediaProxyHTTPClientTimeout
result := opts.MediaProxyHTTPClientTimeout()
if result != expected {
t.Fatalf(`Unexpected MEDIA_PROXY_HTTP_CLIENT_TIMEOUT value, got %d instead of %d`, result, expected)
}
}
func TestMediaProxyCustomURL(t *testing.T) {
os.Clearenv()
os.Setenv("MEDIA_PROXY_CUSTOM_URL", "http://example.org/proxy")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "http://example.org/proxy"
result := opts.MediaCustomProxyURL()
if result != expected {
t.Fatalf(`Unexpected MEDIA_PROXY_CUSTOM_URL value, got %q instead of %q`, result, expected)
}
}
func TestMediaProxyPrivateKey(t *testing.T) {
os.Clearenv()
os.Setenv("MEDIA_PROXY_PRIVATE_KEY", "foobar")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := []byte("foobar")
result := opts.MediaProxyPrivateKey()
if !bytes.Equal(result, expected) {
t.Fatalf(`Unexpected MEDIA_PROXY_PRIVATE_KEY value, got %q instead of %q`, result, expected)
}
}
func TestProxyImagesOptionForBackwardCompatibility(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_IMAGES", "all")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := []string{"image"}
if len(expected) != len(opts.MediaProxyResourceTypes()) {
t.Fatalf(`Unexpected PROXY_IMAGES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
resultMap := make(map[string]bool)
for _, mediaType := range opts.MediaProxyResourceTypes() {
resultMap[mediaType] = true
}
for _, mediaType := range expected {
if !resultMap[mediaType] {
t.Fatalf(`Unexpected PROXY_IMAGES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
}
expectedProxyOption := "all"
result := opts.MediaProxyMode()
if result != expectedProxyOption {
t.Fatalf(`Unexpected PROXY_IMAGES value, got %q instead of %q`, result, expectedProxyOption)
}
}
func TestProxyImageURLForBackwardCompatibility(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_IMAGE_URL", "http://example.org/proxy")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "http://example.org/proxy"
result := opts.MediaCustomProxyURL()
if result != expected {
t.Fatalf(`Unexpected PROXY_IMAGE_URL value, got %q instead of %q`, result, expected)
}
}
func TestProxyURLOptionForBackwardCompatibility(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_URL", "http://example.org/proxy")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "http://example.org/proxy"
result := opts.MediaCustomProxyURL()
if result != expected {
t.Fatalf(`Unexpected PROXY_URL value, got %q instead of %q`, result, expected)
}
}
func TestProxyMediaTypesOptionForBackwardCompatibility(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_MEDIA_TYPES", "image,audio")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := []string{"audio", "image"}
if len(expected) != len(opts.MediaProxyResourceTypes()) {
t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
resultMap := make(map[string]bool)
for _, mediaType := range opts.MediaProxyResourceTypes() {
resultMap[mediaType] = true
}
for _, mediaType := range expected {
if !resultMap[mediaType] {
t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
}
}
}
func TestProxyOptionForBackwardCompatibility(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_OPTION", "all")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "all"
result := opts.MediaProxyMode()
if result != expected {
t.Fatalf(`Unexpected PROXY_OPTION value, got %q instead of %q`, result, expected)
}
}
func TestProxyHTTPClientTimeoutOptionForBackwardCompatibility(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_HTTP_CLIENT_TIMEOUT", "24")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 24
result := opts.MediaProxyHTTPClientTimeout()
if result != expected {
t.Fatalf(`Unexpected PROXY_HTTP_CLIENT_TIMEOUT value, got %d instead of %d`, result, expected)
}
}
func TestProxyPrivateKeyOptionForBackwardCompatibility(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_PRIVATE_KEY", "foobar")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := []byte("foobar")
result := opts.MediaProxyPrivateKey()
if !bytes.Equal(result, expected) {
t.Fatalf(`Unexpected PROXY_PRIVATE_KEY value, got %q instead of %q`, result, expected)
}
}
func TestHTTPSOff(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if opts.HTTPS {
t.Fatalf(`Unexpected HTTPS value, got "%v"`, opts.HTTPS)
}
}
func TestHTTPSOn(t *testing.T) {
os.Clearenv()
os.Setenv("HTTPS", "on")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
if !opts.HTTPS {
t.Fatalf(`Unexpected HTTPS value, got "%v"`, opts.HTTPS)
}
}
func TestHTTPClientTimeout(t *testing.T) {
os.Clearenv()
os.Setenv("HTTP_CLIENT_TIMEOUT", "42")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 42
result := opts.HTTPClientTimeout()
if result != expected {
t.Fatalf(`Unexpected HTTP_CLIENT_TIMEOUT value, got %d instead of %d`, result, expected)
}
}
func TestDefaultHTTPClientTimeoutValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultHTTPClientTimeout
result := opts.HTTPClientTimeout()
if result != expected {
t.Fatalf(`Unexpected HTTP_CLIENT_TIMEOUT value, got %d instead of %d`, result, expected)
}
}
func TestHTTPClientMaxBodySize(t *testing.T) {
os.Clearenv()
os.Setenv("HTTP_CLIENT_MAX_BODY_SIZE", "42")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := int64(42 * 1024 * 1024)
result := opts.HTTPClientMaxBodySize()
if result != expected {
t.Fatalf(`Unexpected HTTP_CLIENT_MAX_BODY_SIZE value, got %d instead of %d`, result, expected)
}
}
func TestDefaultHTTPClientMaxBodySizeValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := int64(defaultHTTPClientMaxBodySize * 1024 * 1024)
result := opts.HTTPClientMaxBodySize()
if result != expected {
t.Fatalf(`Unexpected HTTP_CLIENT_MAX_BODY_SIZE value, got %d instead of %d`, result, expected)
}
}
func TestHTTPServerTimeout(t *testing.T) {
os.Clearenv()
os.Setenv("HTTP_SERVER_TIMEOUT", "342")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := 342
result := opts.HTTPServerTimeout()
if result != expected {
t.Fatalf(`Unexpected HTTP_SERVER_TIMEOUT value, got %d instead of %d`, result, expected)
}
}
func TestDefaultHTTPServerTimeoutValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultHTTPServerTimeout
result := opts.HTTPServerTimeout()
if result != expected {
t.Fatalf(`Unexpected HTTP_SERVER_TIMEOUT value, got %d instead of %d`, result, expected)
}
}
func TestParseConfigFile(t *testing.T) {
content := []byte(`
# This is a comment
DEBUG = yes
POCKET_CONSUMER_KEY= >#1234
Invalid text
`)
tmpfile, err := os.CreateTemp(".", "miniflux.*.unit_test.conf")
if err != nil {
t.Fatal(err)
}
if _, err := tmpfile.Write(content); err != nil {
t.Fatal(err)
}
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseFile(tmpfile.Name())
if err != nil {
t.Errorf(`Parsing failure: %v`, err)
}
if opts.LogLevel() != "debug" {
t.Errorf(`Unexpected debug mode value, got %q`, opts.LogLevel())
}
expected := ">#1234"
result := opts.PocketConsumerKey("default")
if result != expected {
t.Errorf(`Unexpected POCKET_CONSUMER_KEY value, got %q instead of %q`, result, expected)
}
if err := tmpfile.Close(); err != nil {
t.Fatal(err)
}
if err := os.Remove(tmpfile.Name()); err != nil {
t.Fatal(err)
}
}
func TestAuthProxyHeader(t *testing.T) {
os.Clearenv()
os.Setenv("AUTH_PROXY_HEADER", "X-Forwarded-User")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "X-Forwarded-User"
result := opts.AuthProxyHeader()
if result != expected {
t.Fatalf(`Unexpected AUTH_PROXY_HEADER value, got %q instead of %q`, result, expected)
}
}
func TestDefaultAuthProxyHeaderValue(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := defaultAuthProxyHeader
result := opts.AuthProxyHeader()
if result != expected {
t.Fatalf(`Unexpected AUTH_PROXY_HEADER value, got %q instead of %q`, result, expected)
}
}
func TestAuthProxyUserCreationWhenUnset(t *testing.T) {
os.Clearenv()
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := false
result := opts.IsAuthProxyUserCreationAllowed()
if result != expected {
t.Fatalf(`Unexpected AUTH_PROXY_USER_CREATION value, got %v instead of %v`, result, expected)
}
}
func TestAuthProxyUserCreationAdmin(t *testing.T) {
os.Clearenv()
os.Setenv("AUTH_PROXY_USER_CREATION", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.IsAuthProxyUserCreationAllowed()
if result != expected {
t.Fatalf(`Unexpected AUTH_PROXY_USER_CREATION value, got %v instead of %v`, result, expected)
}
}
func TestFetchBilibiliWatchTime(t *testing.T) {
os.Clearenv()
os.Setenv("FETCH_BILIBILI_WATCH_TIME", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.FetchBilibiliWatchTime()
if result != expected {
t.Fatalf(`Unexpected FETCH_BILIBILI_WATCH_TIME value, got %v instead of %v`, result, expected)
}
}
func TestFetchNebulaWatchTime(t *testing.T) {
os.Clearenv()
os.Setenv("FETCH_NEBULA_WATCH_TIME", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.FetchNebulaWatchTime()
if result != expected {
t.Fatalf(`Unexpected FETCH_NEBULA_WATCH_TIME value, got %v instead of %v`, result, expected)
}
}
func TestFetchOdyseeWatchTime(t *testing.T) {
os.Clearenv()
os.Setenv("FETCH_ODYSEE_WATCH_TIME", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.FetchOdyseeWatchTime()
if result != expected {
t.Fatalf(`Unexpected FETCH_ODYSEE_WATCH_TIME value, got %v instead of %v`, result, expected)
}
}
func TestFetchYouTubeWatchTime(t *testing.T) {
os.Clearenv()
os.Setenv("FETCH_YOUTUBE_WATCH_TIME", "1")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := true
result := opts.FetchYouTubeWatchTime()
if result != expected {
t.Fatalf(`Unexpected FETCH_YOUTUBE_WATCH_TIME value, got %v instead of %v`, result, expected)
}
}
func TestYouTubeApiKey(t *testing.T) {
os.Clearenv()
os.Setenv("YOUTUBE_API_KEY", "AAAAAAAAAAAAAaaaaaaaaaaaaa0000000000000")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "AAAAAAAAAAAAAaaaaaaaaaaaaa0000000000000"
result := opts.YouTubeApiKey()
if result != expected {
t.Fatalf(`Unexpected YOUTUBE_API_KEY value, got %v instead of %v`, result, expected)
}
}
func TestYouTubeEmbedUrlOverride(t *testing.T) {
os.Clearenv()
os.Setenv("YOUTUBE_EMBED_URL_OVERRIDE", "https://invidious.custom/embed/")
parser := NewParser()
opts, err := parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
expected := "https://invidious.custom/embed/"
result := opts.YouTubeEmbedUrlOverride()
if result != expected {
t.Fatalf(`Unexpected YOUTUBE_EMBED_URL_OVERRIDE value, got %v instead of %v`, result, expected)
}
}
func TestParseConfigDumpOutput(t *testing.T) {
os.Clearenv()
wantOpts := NewOptions()
wantOpts.adminUsername = "my-username"
serialized := wantOpts.String()
tmpfile, err := os.CreateTemp(".", "miniflux.*.unit_test.conf")
if err != nil {
t.Fatal(err)
}
if _, err := tmpfile.WriteString(serialized); err != nil {
t.Fatal(err)
}
parser := NewParser()
parsedOpts, err := parser.ParseFile(tmpfile.Name())
if err != nil {
t.Errorf(`Parsing failure: %v`, err)
}
if parsedOpts.AdminUsername() != wantOpts.AdminUsername() {
t.Fatalf(`Unexpected ADMIN_USERNAME value, got %q instead of %q`, parsedOpts.AdminUsername(), wantOpts.AdminUsername())
}
if err := tmpfile.Close(); err != nil {
t.Fatal(err)
}
if err := os.Remove(tmpfile.Name()); err != nil {
t.Fatal(err)
}
}
v2-2.2.6/internal/config/options.go 0000664 0000000 0000000 00000074247 14756465373 0017215 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package config // import "miniflux.app/v2/internal/config"
import (
"fmt"
"sort"
"strings"
"time"
"miniflux.app/v2/internal/crypto"
"miniflux.app/v2/internal/version"
)
const (
defaultHTTPS = false
defaultLogFile = "stderr"
defaultLogDateTime = false
defaultLogFormat = "text"
defaultLogLevel = "info"
defaultHSTS = true
defaultHTTPService = true
defaultSchedulerService = true
defaultDebug = false
defaultTiming = false
defaultBaseURL = "http://localhost"
defaultRootURL = "http://localhost"
defaultBasePath = ""
defaultWorkerPoolSize = 16
defaultPollingFrequency = 60
defaultForceRefreshInterval = 30
defaultBatchSize = 100
defaultPollingScheduler = "round_robin"
defaultSchedulerEntryFrequencyMinInterval = 5
defaultSchedulerEntryFrequencyMaxInterval = 24 * 60
defaultSchedulerEntryFrequencyFactor = 1
defaultSchedulerRoundRobinMinInterval = 60
defaultPollingParsingErrorLimit = 3
defaultRunMigrations = false
defaultDatabaseURL = "user=postgres password=postgres dbname=miniflux2 sslmode=disable"
defaultDatabaseMaxConns = 20
defaultDatabaseMinConns = 1
defaultDatabaseConnectionLifetime = 5
defaultListenAddr = "127.0.0.1:8080"
defaultCertFile = ""
defaultKeyFile = ""
defaultCertDomain = ""
defaultCleanupFrequencyHours = 24
defaultCleanupArchiveReadDays = 60
defaultCleanupArchiveUnreadDays = 180
defaultCleanupArchiveBatchSize = 10000
defaultCleanupRemoveSessionsDays = 30
defaultMediaProxyHTTPClientTimeout = 120
defaultMediaProxyMode = "http-only"
defaultMediaResourceTypes = "image"
defaultMediaProxyURL = ""
defaultFilterEntryMaxAgeDays = 0
defaultFetchBilibiliWatchTime = false
defaultFetchNebulaWatchTime = false
defaultFetchOdyseeWatchTime = false
defaultFetchYouTubeWatchTime = false
defaultYouTubeApiKey = ""
defaultYouTubeEmbedUrlOverride = "https://www.youtube-nocookie.com/embed/"
defaultCreateAdmin = false
defaultAdminUsername = ""
defaultAdminPassword = ""
defaultOAuth2UserCreation = false
defaultOAuth2ClientID = ""
defaultOAuth2ClientSecret = ""
defaultOAuth2RedirectURL = ""
defaultOAuth2OidcDiscoveryEndpoint = ""
defaultOauth2OidcProviderName = "OpenID Connect"
defaultOAuth2Provider = ""
defaultDisableLocalAuth = false
defaultPocketConsumerKey = ""
defaultHTTPClientTimeout = 20
defaultHTTPClientMaxBodySize = 15
defaultHTTPClientProxy = ""
defaultHTTPServerTimeout = 300
defaultAuthProxyHeader = ""
defaultAuthProxyUserCreation = false
defaultMaintenanceMode = false
defaultMaintenanceMessage = "Miniflux is currently under maintenance"
defaultMetricsCollector = false
defaultMetricsRefreshInterval = 60
defaultMetricsAllowedNetworks = "127.0.0.1/8"
defaultMetricsUsername = ""
defaultMetricsPassword = ""
defaultWatchdog = true
defaultInvidiousInstance = "yewtu.be"
defaultWebAuthn = false
)
var defaultHTTPClientUserAgent = "Mozilla/5.0 (compatible; Miniflux/" + version.Version + "; +https://miniflux.app)"
// Option represents a single key/value configuration pair. It may be used to output debug strings.
type Option struct {
Key string
Value interface{}
}
// Options contains configuration options.
type Options struct {
HTTPS bool
logFile string
logDateTime bool
logFormat string
logLevel string
hsts bool
httpService bool
schedulerService bool
serverTimingHeader bool
baseURL string
rootURL string
basePath string
databaseURL string
databaseMaxConns int
databaseMinConns int
databaseConnectionLifetime int
runMigrations bool
listenAddr string
certFile string
certDomain string
certKeyFile string
cleanupFrequencyHours int
cleanupArchiveReadDays int
cleanupArchiveUnreadDays int
cleanupArchiveBatchSize int
cleanupRemoveSessionsDays int
pollingFrequency int
forceRefreshInterval int
batchSize int
pollingScheduler string
schedulerEntryFrequencyMinInterval int
schedulerEntryFrequencyMaxInterval int
schedulerEntryFrequencyFactor int
schedulerRoundRobinMinInterval int
pollingParsingErrorLimit int
workerPoolSize int
createAdmin bool
adminUsername string
adminPassword string
mediaProxyHTTPClientTimeout int
mediaProxyMode string
mediaProxyResourceTypes []string
mediaProxyCustomURL string
fetchBilibiliWatchTime bool
fetchNebulaWatchTime bool
fetchOdyseeWatchTime bool
fetchYouTubeWatchTime bool
filterEntryMaxAgeDays int
youTubeApiKey string
youTubeEmbedUrlOverride string
oauth2UserCreationAllowed bool
oauth2ClientID string
oauth2ClientSecret string
oauth2RedirectURL string
oidcDiscoveryEndpoint string
oidcProviderName string
oauth2Provider string
disableLocalAuth bool
pocketConsumerKey string
httpClientTimeout int
httpClientMaxBodySize int64
httpClientProxy string
httpClientUserAgent string
httpServerTimeout int
authProxyHeader string
authProxyUserCreation bool
maintenanceMode bool
maintenanceMessage string
metricsCollector bool
metricsRefreshInterval int
metricsAllowedNetworks []string
metricsUsername string
metricsPassword string
watchdog bool
invidiousInstance string
mediaProxyPrivateKey []byte
webAuthn bool
}
// NewOptions returns Options with default values.
func NewOptions() *Options {
return &Options{
HTTPS: defaultHTTPS,
logFile: defaultLogFile,
logDateTime: defaultLogDateTime,
logFormat: defaultLogFormat,
logLevel: defaultLogLevel,
hsts: defaultHSTS,
httpService: defaultHTTPService,
schedulerService: defaultSchedulerService,
serverTimingHeader: defaultTiming,
baseURL: defaultBaseURL,
rootURL: defaultRootURL,
basePath: defaultBasePath,
databaseURL: defaultDatabaseURL,
databaseMaxConns: defaultDatabaseMaxConns,
databaseMinConns: defaultDatabaseMinConns,
databaseConnectionLifetime: defaultDatabaseConnectionLifetime,
runMigrations: defaultRunMigrations,
listenAddr: defaultListenAddr,
certFile: defaultCertFile,
certDomain: defaultCertDomain,
certKeyFile: defaultKeyFile,
cleanupFrequencyHours: defaultCleanupFrequencyHours,
cleanupArchiveReadDays: defaultCleanupArchiveReadDays,
cleanupArchiveUnreadDays: defaultCleanupArchiveUnreadDays,
cleanupArchiveBatchSize: defaultCleanupArchiveBatchSize,
cleanupRemoveSessionsDays: defaultCleanupRemoveSessionsDays,
pollingFrequency: defaultPollingFrequency,
forceRefreshInterval: defaultForceRefreshInterval,
batchSize: defaultBatchSize,
pollingScheduler: defaultPollingScheduler,
schedulerEntryFrequencyMinInterval: defaultSchedulerEntryFrequencyMinInterval,
schedulerEntryFrequencyMaxInterval: defaultSchedulerEntryFrequencyMaxInterval,
schedulerEntryFrequencyFactor: defaultSchedulerEntryFrequencyFactor,
schedulerRoundRobinMinInterval: defaultSchedulerRoundRobinMinInterval,
pollingParsingErrorLimit: defaultPollingParsingErrorLimit,
workerPoolSize: defaultWorkerPoolSize,
createAdmin: defaultCreateAdmin,
mediaProxyHTTPClientTimeout: defaultMediaProxyHTTPClientTimeout,
mediaProxyMode: defaultMediaProxyMode,
mediaProxyResourceTypes: []string{defaultMediaResourceTypes},
mediaProxyCustomURL: defaultMediaProxyURL,
filterEntryMaxAgeDays: defaultFilterEntryMaxAgeDays,
fetchBilibiliWatchTime: defaultFetchBilibiliWatchTime,
fetchNebulaWatchTime: defaultFetchNebulaWatchTime,
fetchOdyseeWatchTime: defaultFetchOdyseeWatchTime,
fetchYouTubeWatchTime: defaultFetchYouTubeWatchTime,
youTubeApiKey: defaultYouTubeApiKey,
youTubeEmbedUrlOverride: defaultYouTubeEmbedUrlOverride,
oauth2UserCreationAllowed: defaultOAuth2UserCreation,
oauth2ClientID: defaultOAuth2ClientID,
oauth2ClientSecret: defaultOAuth2ClientSecret,
oauth2RedirectURL: defaultOAuth2RedirectURL,
oidcDiscoveryEndpoint: defaultOAuth2OidcDiscoveryEndpoint,
oidcProviderName: defaultOauth2OidcProviderName,
oauth2Provider: defaultOAuth2Provider,
disableLocalAuth: defaultDisableLocalAuth,
pocketConsumerKey: defaultPocketConsumerKey,
httpClientTimeout: defaultHTTPClientTimeout,
httpClientMaxBodySize: defaultHTTPClientMaxBodySize * 1024 * 1024,
httpClientProxy: defaultHTTPClientProxy,
httpClientUserAgent: defaultHTTPClientUserAgent,
httpServerTimeout: defaultHTTPServerTimeout,
authProxyHeader: defaultAuthProxyHeader,
authProxyUserCreation: defaultAuthProxyUserCreation,
maintenanceMode: defaultMaintenanceMode,
maintenanceMessage: defaultMaintenanceMessage,
metricsCollector: defaultMetricsCollector,
metricsRefreshInterval: defaultMetricsRefreshInterval,
metricsAllowedNetworks: []string{defaultMetricsAllowedNetworks},
metricsUsername: defaultMetricsUsername,
metricsPassword: defaultMetricsPassword,
watchdog: defaultWatchdog,
invidiousInstance: defaultInvidiousInstance,
mediaProxyPrivateKey: crypto.GenerateRandomBytes(16),
webAuthn: defaultWebAuthn,
}
}
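// LogFile returns the log file destination.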
func (o *Options) LogFile() string {
return o.logFile
}
// LogDateTime returns true if the date/time should be displayed in log messages.
func (o *Options) LogDateTime() bool {
return o.logDateTime
}
// LogFormat returns the log format.
func (o *Options) LogFormat() string {
return o.logFormat
}
// LogLevel returns the log level.
func (o *Options) LogLevel() string {
return o.logLevel
}
// SetLogLevel sets the log level.
func (o *Options) SetLogLevel(level string) {
o.logLevel = level
}
// HasMaintenanceMode returns true if maintenance mode is enabled.
func (o *Options) HasMaintenanceMode() bool {
return o.maintenanceMode
}
// MaintenanceMessage returns maintenance message.
func (o *Options) MaintenanceMessage() string {
return o.maintenanceMessage
}
// HasServerTimingHeader returns true if the Server-Timing header is enabled.
func (o *Options) HasServerTimingHeader() bool {
return o.serverTimingHeader
}
// BaseURL returns the application base URL with path.
func (o *Options) BaseURL() string {
return o.baseURL
}
// RootURL returns the base URL without path.
func (o *Options) RootURL() string {
return o.rootURL
}
// BasePath returns the application base path according to the base URL.
func (o *Options) BasePath() string {
return o.basePath
}
// IsDefaultDatabaseURL returns true if the default database URL is used.
func (o *Options) IsDefaultDatabaseURL() bool {
return o.databaseURL == defaultDatabaseURL
}
// DatabaseURL returns the database URL.
func (o *Options) DatabaseURL() string {
return o.databaseURL
}
// DatabaseMaxConns returns the maximum number of database connections.
func (o *Options) DatabaseMaxConns() int {
return o.databaseMaxConns
}
// DatabaseMinConns returns the minimum number of database connections.
func (o *Options) DatabaseMinConns() int {
return o.databaseMinConns
}
// DatabaseConnectionLifetime returns the maximum amount of time a connection may be reused.
func (o *Options) DatabaseConnectionLifetime() time.Duration {
return time.Duration(o.databaseConnectionLifetime) * time.Minute
}
// ListenAddr returns the listen address for the HTTP server.
func (o *Options) ListenAddr() string {
return o.listenAddr
}
// CertFile returns the SSL certificate filename if any.
func (o *Options) CertFile() string {
return o.certFile
}
// CertKeyFile returns the private key filename for custom SSL certificate.
func (o *Options) CertKeyFile() string {
return o.certKeyFile
}
// CertDomain returns the domain to use for Let's Encrypt certificate.
func (o *Options) CertDomain() string {
return o.certDomain
}
// CleanupFrequencyHours returns the interval in hours for cleanup jobs.
func (o *Options) CleanupFrequencyHours() int {
return o.cleanupFrequencyHours
}
// CleanupArchiveReadDays returns the number of days after which read items are marked as removed.
func (o *Options) CleanupArchiveReadDays() int {
return o.cleanupArchiveReadDays
}
// CleanupArchiveUnreadDays returns the number of days after which unread items are marked as removed.
func (o *Options) CleanupArchiveUnreadDays() int {
return o.cleanupArchiveUnreadDays
}
// CleanupArchiveBatchSize returns the number of entries to archive for each interval.
func (o *Options) CleanupArchiveBatchSize() int {
return o.cleanupArchiveBatchSize
}
// CleanupRemoveSessionsDays returns the number of days after which to remove sessions.
func (o *Options) CleanupRemoveSessionsDays() int {
return o.cleanupRemoveSessionsDays
}
// WorkerPoolSize returns the number of background workers.
func (o *Options) WorkerPoolSize() int {
return o.workerPoolSize
}
// PollingFrequency returns the interval to refresh feeds in the background.
func (o *Options) PollingFrequency() int {
return o.pollingFrequency
}
// ForceRefreshInterval returns the force refresh interval.
func (o *Options) ForceRefreshInterval() int {
return o.forceRefreshInterval
}
// BatchSize returns the number of feeds to send for background processing.
func (o *Options) BatchSize() int {
return o.batchSize
}
// PollingScheduler returns the scheduler used for polling feeds.
func (o *Options) PollingScheduler() string {
return o.pollingScheduler
}
// SchedulerEntryFrequencyMaxInterval returns the maximum interval in minutes for the entry frequency scheduler.
func (o *Options) SchedulerEntryFrequencyMaxInterval() int {
return o.schedulerEntryFrequencyMaxInterval
}
// SchedulerEntryFrequencyMinInterval returns the minimum interval in minutes for the entry frequency scheduler.
func (o *Options) SchedulerEntryFrequencyMinInterval() int {
return o.schedulerEntryFrequencyMinInterval
}
// SchedulerEntryFrequencyFactor returns the factor for the entry frequency scheduler.
func (o *Options) SchedulerEntryFrequencyFactor() int {
return o.schedulerEntryFrequencyFactor
}
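// SchedulerRoundRobinMinInterval returns the minimum interval in minutes for the round-robin scheduler.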
func (o *Options) SchedulerRoundRobinMinInterval() int {
return o.schedulerRoundRobinMinInterval
}
// PollingParsingErrorLimit returns the number of parsing errors after which polling is stopped.
func (o *Options) PollingParsingErrorLimit() int {
return o.pollingParsingErrorLimit
}
// IsOAuth2UserCreationAllowed returns true if user creation is allowed for OAuth2 users.
func (o *Options) IsOAuth2UserCreationAllowed() bool {
return o.oauth2UserCreationAllowed
}
// OAuth2ClientID returns the OAuth2 Client ID.
func (o *Options) OAuth2ClientID() string {
return o.oauth2ClientID
}
// OAuth2ClientSecret returns the OAuth2 client secret.
func (o *Options) OAuth2ClientSecret() string {
return o.oauth2ClientSecret
}
// OAuth2RedirectURL returns the OAuth2 redirect URL.
func (o *Options) OAuth2RedirectURL() string {
return o.oauth2RedirectURL
}
// OIDCDiscoveryEndpoint returns the OAuth2 OIDC discovery endpoint.
func (o *Options) OIDCDiscoveryEndpoint() string {
return o.oidcDiscoveryEndpoint
}
// OIDCProviderName returns the OAuth2 OIDC provider's display name.
func (o *Options) OIDCProviderName() string {
return o.oidcProviderName
}
// OAuth2Provider returns the name of the OAuth2 provider configured.
func (o *Options) OAuth2Provider() string {
return o.oauth2Provider
}
// DisableLocalAuth returns true if the local user database should not be used to authenticate users.
func (o *Options) DisableLocalAuth() bool {
return o.disableLocalAuth
}
// HasHSTS returns true if HTTP Strict Transport Security is enabled.
func (o *Options) HasHSTS() bool {
return o.hsts
}
// RunMigrations returns true if the environment variable RUN_MIGRATIONS is not empty.
func (o *Options) RunMigrations() bool {
return o.runMigrations
}
// CreateAdmin returns true if the environment variable CREATE_ADMIN is not empty.
func (o *Options) CreateAdmin() bool {
return o.createAdmin
}
// AdminUsername returns the admin username if defined.
func (o *Options) AdminUsername() string {
return o.adminUsername
}
// AdminPassword returns the admin password if defined.
func (o *Options) AdminPassword() string {
return o.adminPassword
}
// FetchYouTubeWatchTime returns true if the YouTube video duration
// should be fetched and used as a reading time.
func (o *Options) FetchYouTubeWatchTime() bool {
return o.fetchYouTubeWatchTime
}
// YouTubeApiKey returns the YouTube API key if defined.
func (o *Options) YouTubeApiKey() string {
return o.youTubeApiKey
}
// YouTubeEmbedUrlOverride returns the YouTube URL which will be used for embeds.
func (o *Options) YouTubeEmbedUrlOverride() string {
return o.youTubeEmbedUrlOverride
}
// FetchNebulaWatchTime returns true if the Nebula video duration
// should be fetched and used as a reading time.
func (o *Options) FetchNebulaWatchTime() bool {
return o.fetchNebulaWatchTime
}
// FetchOdyseeWatchTime returns true if the Odysee video duration
// should be fetched and used as a reading time.
func (o *Options) FetchOdyseeWatchTime() bool {
return o.fetchOdyseeWatchTime
}
// FetchBilibiliWatchTime returns true if the Bilibili video duration
// should be fetched and used as a reading time.
func (o *Options) FetchBilibiliWatchTime() bool {
return o.fetchBilibiliWatchTime
}
// MediaProxyMode returns "none" to never proxy, "http-only" to proxy non-HTTPS, "all" to always proxy.
func (o *Options) MediaProxyMode() string {
return o.mediaProxyMode
}
// MediaProxyResourceTypes returns a slice of resource types to proxy.
func (o *Options) MediaProxyResourceTypes() []string {
return o.mediaProxyResourceTypes
}
// MediaCustomProxyURL returns the custom proxy URL for media.
func (o *Options) MediaCustomProxyURL() string {
return o.mediaProxyCustomURL
}
// MediaProxyHTTPClientTimeout returns the time limit in seconds before the proxy HTTP client cancels the request.
func (o *Options) MediaProxyHTTPClientTimeout() int {
return o.mediaProxyHTTPClientTimeout
}
// MediaProxyPrivateKey returns the private key used by the media proxy.
func (o *Options) MediaProxyPrivateKey() []byte {
return o.mediaProxyPrivateKey
}
// HasHTTPService returns true if the HTTP service is enabled.
func (o *Options) HasHTTPService() bool {
return o.httpService
}
// HasSchedulerService returns true if the scheduler service is enabled.
func (o *Options) HasSchedulerService() bool {
return o.schedulerService
}
// PocketConsumerKey returns the Pocket Consumer Key if configured.
func (o *Options) PocketConsumerKey(defaultValue string) string {
if o.pocketConsumerKey != "" {
return o.pocketConsumerKey
}
return defaultValue
}
// HTTPClientTimeout returns the time limit in seconds before the HTTP client cancels the request.
func (o *Options) HTTPClientTimeout() int {
return o.httpClientTimeout
}
// HTTPClientMaxBodySize returns the number of bytes allowed for the HTTP client to transfer.
func (o *Options) HTTPClientMaxBodySize() int64 {
return o.httpClientMaxBodySize
}
// HTTPClientProxy returns the proxy URL for HTTP client.
func (o *Options) HTTPClientProxy() string {
return o.httpClientProxy
}
// HTTPServerTimeout returns the time limit in seconds before the HTTP server cancels the request.
func (o *Options) HTTPServerTimeout() int {
return o.httpServerTimeout
}
// HasHTTPClientProxyConfigured returns true if the HTTP proxy is configured.
func (o *Options) HasHTTPClientProxyConfigured() bool {
return o.httpClientProxy != ""
}
// AuthProxyHeader returns the name of the HTTP header that contains the username for
// authentication using the auth proxy.
func (o *Options) AuthProxyHeader() string {
return o.authProxyHeader
}
// IsAuthProxyUserCreationAllowed returns true if user creation is allowed for
// users authenticated using auth proxy.
func (o *Options) IsAuthProxyUserCreationAllowed() bool {
return o.authProxyUserCreation
}
// HasMetricsCollector returns true if metrics collection is enabled.
func (o *Options) HasMetricsCollector() bool {
return o.metricsCollector
}
// MetricsRefreshInterval returns the refresh interval in seconds.
func (o *Options) MetricsRefreshInterval() int {
return o.metricsRefreshInterval
}
// MetricsAllowedNetworks returns the list of networks allowed to connect to the metrics endpoint.
func (o *Options) MetricsAllowedNetworks() []string {
return o.metricsAllowedNetworks
}
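// MetricsUsername returns the username for accessing the metrics endpoint.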
func (o *Options) MetricsUsername() string {
return o.metricsUsername
}
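// MetricsPassword returns the password for accessing the metrics endpoint.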
func (o *Options) MetricsPassword() string {
return o.metricsPassword
}
// HTTPClientUserAgent returns the global User-Agent header for miniflux.
func (o *Options) HTTPClientUserAgent() string {
return o.httpClientUserAgent
}
// HasWatchdog returns true if the systemd watchdog is enabled.
func (o *Options) HasWatchdog() bool {
return o.watchdog
}
// InvidiousInstance returns the Invidious instance used by miniflux.
func (o *Options) InvidiousInstance() string {
return o.invidiousInstance
}
// WebAuthn returns true if WebAuthn logins are supported.
func (o *Options) WebAuthn() bool {
return o.webAuthn
}
// FilterEntryMaxAgeDays returns the maximum age in days of entries to keep; older entries are filtered out.
func (o *Options) FilterEntryMaxAgeDays() int {
return o.filterEntryMaxAgeDays
}
// SortedOptions returns options as a list of key value pairs, sorted by keys.
func (o *Options) SortedOptions(redactSecret bool) []*Option {
var keyValues = map[string]interface{}{
"ADMIN_PASSWORD": redactSecretValue(o.adminPassword, redactSecret),
"ADMIN_USERNAME": o.adminUsername,
"AUTH_PROXY_HEADER": o.authProxyHeader,
"AUTH_PROXY_USER_CREATION": o.authProxyUserCreation,
"BASE_PATH": o.basePath,
"BASE_URL": o.baseURL,
"BATCH_SIZE": o.batchSize,
"CERT_DOMAIN": o.certDomain,
"CERT_FILE": o.certFile,
"CLEANUP_ARCHIVE_BATCH_SIZE": o.cleanupArchiveBatchSize,
"CLEANUP_ARCHIVE_READ_DAYS": o.cleanupArchiveReadDays,
"CLEANUP_ARCHIVE_UNREAD_DAYS": o.cleanupArchiveUnreadDays,
"CLEANUP_FREQUENCY_HOURS": o.cleanupFrequencyHours,
"CLEANUP_REMOVE_SESSIONS_DAYS": o.cleanupRemoveSessionsDays,
"CREATE_ADMIN": o.createAdmin,
"DATABASE_CONNECTION_LIFETIME": o.databaseConnectionLifetime,
"DATABASE_MAX_CONNS": o.databaseMaxConns,
"DATABASE_MIN_CONNS": o.databaseMinConns,
"DATABASE_URL": redactSecretValue(o.databaseURL, redactSecret),
"DISABLE_HSTS": !o.hsts,
"DISABLE_HTTP_SERVICE": !o.httpService,
"DISABLE_SCHEDULER_SERVICE": !o.schedulerService,
"FILTER_ENTRY_MAX_AGE_DAYS": o.filterEntryMaxAgeDays,
"FETCH_YOUTUBE_WATCH_TIME": o.fetchYouTubeWatchTime,
"FETCH_NEBULA_WATCH_TIME": o.fetchNebulaWatchTime,
"FETCH_ODYSEE_WATCH_TIME": o.fetchOdyseeWatchTime,
"FETCH_BILIBILI_WATCH_TIME": o.fetchBilibiliWatchTime,
"HTTPS": o.HTTPS,
"HTTP_CLIENT_MAX_BODY_SIZE": o.httpClientMaxBodySize,
"HTTP_CLIENT_PROXY": o.httpClientProxy,
"HTTP_CLIENT_TIMEOUT": o.httpClientTimeout,
"HTTP_CLIENT_USER_AGENT": o.httpClientUserAgent,
"HTTP_SERVER_TIMEOUT": o.httpServerTimeout,
"HTTP_SERVICE": o.httpService,
"INVIDIOUS_INSTANCE": o.invidiousInstance,
"KEY_FILE": o.certKeyFile,
"LISTEN_ADDR": o.listenAddr,
"LOG_FILE": o.logFile,
"LOG_DATE_TIME": o.logDateTime,
"LOG_FORMAT": o.logFormat,
"LOG_LEVEL": o.logLevel,
"MAINTENANCE_MESSAGE": o.maintenanceMessage,
"MAINTENANCE_MODE": o.maintenanceMode,
"METRICS_ALLOWED_NETWORKS": strings.Join(o.metricsAllowedNetworks, ","),
"METRICS_COLLECTOR": o.metricsCollector,
"METRICS_PASSWORD": redactSecretValue(o.metricsPassword, redactSecret),
"METRICS_REFRESH_INTERVAL": o.metricsRefreshInterval,
"METRICS_USERNAME": o.metricsUsername,
"OAUTH2_CLIENT_ID": o.oauth2ClientID,
"OAUTH2_CLIENT_SECRET": redactSecretValue(o.oauth2ClientSecret, redactSecret),
"OAUTH2_OIDC_DISCOVERY_ENDPOINT": o.oidcDiscoveryEndpoint,
"OAUTH2_OIDC_PROVIDER_NAME": o.oidcProviderName,
"OAUTH2_PROVIDER": o.oauth2Provider,
"OAUTH2_REDIRECT_URL": o.oauth2RedirectURL,
"OAUTH2_USER_CREATION": o.oauth2UserCreationAllowed,
"DISABLE_LOCAL_AUTH": o.disableLocalAuth,
"POCKET_CONSUMER_KEY": redactSecretValue(o.pocketConsumerKey, redactSecret),
"POLLING_FREQUENCY": o.pollingFrequency,
"FORCE_REFRESH_INTERVAL": o.forceRefreshInterval,
"POLLING_PARSING_ERROR_LIMIT": o.pollingParsingErrorLimit,
"POLLING_SCHEDULER": o.pollingScheduler,
"MEDIA_PROXY_HTTP_CLIENT_TIMEOUT": o.mediaProxyHTTPClientTimeout,
"MEDIA_PROXY_RESOURCE_TYPES": o.mediaProxyResourceTypes,
"MEDIA_PROXY_MODE": o.mediaProxyMode,
"MEDIA_PROXY_PRIVATE_KEY": redactSecretValue(string(o.mediaProxyPrivateKey), redactSecret),
"MEDIA_PROXY_CUSTOM_URL": o.mediaProxyCustomURL,
"ROOT_URL": o.rootURL,
"RUN_MIGRATIONS": o.runMigrations,
"SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL": o.schedulerEntryFrequencyMaxInterval,
"SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL": o.schedulerEntryFrequencyMinInterval,
"SCHEDULER_ENTRY_FREQUENCY_FACTOR": o.schedulerEntryFrequencyFactor,
"SCHEDULER_ROUND_ROBIN_MIN_INTERVAL": o.schedulerRoundRobinMinInterval,
"SCHEDULER_SERVICE": o.schedulerService,
"SERVER_TIMING_HEADER": o.serverTimingHeader,
"WATCHDOG": o.watchdog,
"WORKER_POOL_SIZE": o.workerPoolSize,
"YOUTUBE_API_KEY": redactSecretValue(o.youTubeApiKey, redactSecret),
"YOUTUBE_EMBED_URL_OVERRIDE": o.youTubeEmbedUrlOverride,
"WEBAUTHN": o.webAuthn,
}
keys := make([]string, 0, len(keyValues))
for key := range keyValues {
keys = append(keys, key)
}
sort.Strings(keys)
var sortedOptions []*Option
for _, key := range keys {
sortedOptions = append(sortedOptions, &Option{Key: key, Value: keyValues[key]})
}
return sortedOptions
}
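// String returns all options as newline-separated KEY=value pairs, sorted by key, without redacting secrets.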
func (o *Options) String() string {
var builder strings.Builder
for _, option := range o.SortedOptions(false) {
fmt.Fprintf(&builder, "%s=%v\n", option.Key, option.Value)
}
return builder.String()
}
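// redactSecretValue returns an empty string when redactSecret is true and the value is not empty; otherwise it returns the value unchanged.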
func redactSecretValue(value string, redactSecret bool) string {
if redactSecret && value != "" {
return ""
}
return value
}
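// Illustrative usage (a minimal sketch, not part of the original source): any
// code holding an Options value can dump a redacted view of the configuration.
// The loop below mirrors what String() does, but passes true to SortedOptions
// so that secrets such as ADMIN_PASSWORD or DATABASE_URL are blanked out when set.
//
//	opts := NewOptions()
//	for _, option := range opts.SortedOptions(true) {
//		fmt.Printf("%s=%v\n", option.Key, option.Value)
//	}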
v2-2.2.6/internal/config/parser.go 0000664 0000000 0000000 00000033757 14756465373 0017017 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package config // import "miniflux.app/v2/internal/config"
import (
"bufio"
"bytes"
"crypto/rand"
"errors"
"fmt"
"io"
"log/slog"
"net/url"
"os"
"strconv"
"strings"
)
// Parser handles configuration parsing.
type Parser struct {
opts *Options
}
// NewParser returns a new Parser.
func NewParser() *Parser {
return &Parser{
opts: NewOptions(),
}
}
// ParseEnvironmentVariables loads configuration values from environment variables.
func (p *Parser) ParseEnvironmentVariables() (*Options, error) {
err := p.parseLines(os.Environ())
if err != nil {
return nil, err
}
return p.opts, nil
}
// ParseFile loads configuration values from a local file.
func (p *Parser) ParseFile(filename string) (*Options, error) {
fp, err := os.Open(filename)
if err != nil {
return nil, err
}
defer fp.Close()
err = p.parseLines(p.parseFileContent(fp))
if err != nil {
return nil, err
}
return p.opts, nil
}
func (p *Parser) parseFileContent(r io.Reader) (lines []string) {
scanner := bufio.NewScanner(r)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if !strings.HasPrefix(line, "#") && strings.Index(line, "=") > 0 {
lines = append(lines, line)
}
}
return lines
}
func (p *Parser) parseLines(lines []string) (err error) {
var port string
for _, line := range lines {
fields := strings.SplitN(line, "=", 2)
key := strings.TrimSpace(fields[0])
value := strings.TrimSpace(fields[1])
switch key {
case "LOG_FILE":
p.opts.logFile = parseString(value, defaultLogFile)
case "LOG_DATE_TIME":
p.opts.logDateTime = parseBool(value, defaultLogDateTime)
case "LOG_LEVEL":
parsedValue := parseString(value, defaultLogLevel)
if parsedValue == "debug" || parsedValue == "info" || parsedValue == "warning" || parsedValue == "error" {
p.opts.logLevel = parsedValue
}
case "LOG_FORMAT":
parsedValue := parseString(value, defaultLogFormat)
if parsedValue == "json" || parsedValue == "text" {
p.opts.logFormat = parsedValue
}
case "DEBUG":
slog.Warn("The DEBUG environment variable is deprecated, use LOG_LEVEL instead")
parsedValue := parseBool(value, defaultDebug)
if parsedValue {
p.opts.logLevel = "debug"
}
case "SERVER_TIMING_HEADER":
p.opts.serverTimingHeader = parseBool(value, defaultTiming)
case "BASE_URL":
p.opts.baseURL, p.opts.rootURL, p.opts.basePath, err = parseBaseURL(value)
if err != nil {
return err
}
case "PORT":
port = value
case "LISTEN_ADDR":
p.opts.listenAddr = parseString(value, defaultListenAddr)
case "DATABASE_URL":
p.opts.databaseURL = parseString(value, defaultDatabaseURL)
case "DATABASE_URL_FILE":
p.opts.databaseURL = readSecretFile(value, defaultDatabaseURL)
case "DATABASE_MAX_CONNS":
p.opts.databaseMaxConns = parseInt(value, defaultDatabaseMaxConns)
case "DATABASE_MIN_CONNS":
p.opts.databaseMinConns = parseInt(value, defaultDatabaseMinConns)
case "DATABASE_CONNECTION_LIFETIME":
p.opts.databaseConnectionLifetime = parseInt(value, defaultDatabaseConnectionLifetime)
case "FILTER_ENTRY_MAX_AGE_DAYS":
p.opts.filterEntryMaxAgeDays = parseInt(value, defaultFilterEntryMaxAgeDays)
case "RUN_MIGRATIONS":
p.opts.runMigrations = parseBool(value, defaultRunMigrations)
case "DISABLE_HSTS":
p.opts.hsts = !parseBool(value, defaultHSTS)
case "HTTPS":
p.opts.HTTPS = parseBool(value, defaultHTTPS)
case "DISABLE_SCHEDULER_SERVICE":
p.opts.schedulerService = !parseBool(value, defaultSchedulerService)
case "DISABLE_HTTP_SERVICE":
p.opts.httpService = !parseBool(value, defaultHTTPService)
case "CERT_FILE":
p.opts.certFile = parseString(value, defaultCertFile)
case "KEY_FILE":
p.opts.certKeyFile = parseString(value, defaultKeyFile)
case "CERT_DOMAIN":
p.opts.certDomain = parseString(value, defaultCertDomain)
case "CLEANUP_FREQUENCY_HOURS":
p.opts.cleanupFrequencyHours = parseInt(value, defaultCleanupFrequencyHours)
case "CLEANUP_ARCHIVE_READ_DAYS":
p.opts.cleanupArchiveReadDays = parseInt(value, defaultCleanupArchiveReadDays)
case "CLEANUP_ARCHIVE_UNREAD_DAYS":
p.opts.cleanupArchiveUnreadDays = parseInt(value, defaultCleanupArchiveUnreadDays)
case "CLEANUP_ARCHIVE_BATCH_SIZE":
p.opts.cleanupArchiveBatchSize = parseInt(value, defaultCleanupArchiveBatchSize)
case "CLEANUP_REMOVE_SESSIONS_DAYS":
p.opts.cleanupRemoveSessionsDays = parseInt(value, defaultCleanupRemoveSessionsDays)
case "WORKER_POOL_SIZE":
p.opts.workerPoolSize = parseInt(value, defaultWorkerPoolSize)
case "POLLING_FREQUENCY":
p.opts.pollingFrequency = parseInt(value, defaultPollingFrequency)
case "FORCE_REFRESH_INTERVAL":
p.opts.forceRefreshInterval = parseInt(value, defaultForceRefreshInterval)
case "BATCH_SIZE":
p.opts.batchSize = parseInt(value, defaultBatchSize)
case "POLLING_SCHEDULER":
p.opts.pollingScheduler = strings.ToLower(parseString(value, defaultPollingScheduler))
case "SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL":
p.opts.schedulerEntryFrequencyMaxInterval = parseInt(value, defaultSchedulerEntryFrequencyMaxInterval)
case "SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL":
p.opts.schedulerEntryFrequencyMinInterval = parseInt(value, defaultSchedulerEntryFrequencyMinInterval)
case "SCHEDULER_ENTRY_FREQUENCY_FACTOR":
p.opts.schedulerEntryFrequencyFactor = parseInt(value, defaultSchedulerEntryFrequencyFactor)
case "SCHEDULER_ROUND_ROBIN_MIN_INTERVAL":
p.opts.schedulerRoundRobinMinInterval = parseInt(value, defaultSchedulerRoundRobinMinInterval)
case "POLLING_PARSING_ERROR_LIMIT":
p.opts.pollingParsingErrorLimit = parseInt(value, defaultPollingParsingErrorLimit)
case "PROXY_IMAGES":
slog.Warn("The PROXY_IMAGES environment variable is deprecated, use MEDIA_PROXY_MODE instead")
p.opts.mediaProxyMode = parseString(value, defaultMediaProxyMode)
case "PROXY_HTTP_CLIENT_TIMEOUT":
slog.Warn("The PROXY_HTTP_CLIENT_TIMEOUT environment variable is deprecated, use MEDIA_PROXY_HTTP_CLIENT_TIMEOUT instead")
p.opts.mediaProxyHTTPClientTimeout = parseInt(value, defaultMediaProxyHTTPClientTimeout)
case "MEDIA_PROXY_HTTP_CLIENT_TIMEOUT":
p.opts.mediaProxyHTTPClientTimeout = parseInt(value, defaultMediaProxyHTTPClientTimeout)
case "PROXY_OPTION":
slog.Warn("The PROXY_OPTION environment variable is deprecated, use MEDIA_PROXY_MODE instead")
p.opts.mediaProxyMode = parseString(value, defaultMediaProxyMode)
case "MEDIA_PROXY_MODE":
p.opts.mediaProxyMode = parseString(value, defaultMediaProxyMode)
case "PROXY_MEDIA_TYPES":
slog.Warn("The PROXY_MEDIA_TYPES environment variable is deprecated, use MEDIA_PROXY_RESOURCE_TYPES instead")
p.opts.mediaProxyResourceTypes = parseStringList(value, []string{defaultMediaResourceTypes})
case "MEDIA_PROXY_RESOURCE_TYPES":
p.opts.mediaProxyResourceTypes = parseStringList(value, []string{defaultMediaResourceTypes})
case "PROXY_IMAGE_URL":
slog.Warn("The PROXY_IMAGE_URL environment variable is deprecated, use MEDIA_PROXY_CUSTOM_URL instead")
p.opts.mediaProxyCustomURL = parseString(value, defaultMediaProxyURL)
case "PROXY_URL":
slog.Warn("The PROXY_URL environment variable is deprecated, use MEDIA_PROXY_CUSTOM_URL instead")
p.opts.mediaProxyCustomURL = parseString(value, defaultMediaProxyURL)
case "PROXY_PRIVATE_KEY":
slog.Warn("The PROXY_PRIVATE_KEY environment variable is deprecated, use MEDIA_PROXY_PRIVATE_KEY instead")
randomKey := make([]byte, 16)
rand.Read(randomKey)
p.opts.mediaProxyPrivateKey = parseBytes(value, randomKey)
case "MEDIA_PROXY_PRIVATE_KEY":
randomKey := make([]byte, 16)
rand.Read(randomKey)
p.opts.mediaProxyPrivateKey = parseBytes(value, randomKey)
case "MEDIA_PROXY_CUSTOM_URL":
p.opts.mediaProxyCustomURL = parseString(value, defaultMediaProxyURL)
case "CREATE_ADMIN":
p.opts.createAdmin = parseBool(value, defaultCreateAdmin)
case "ADMIN_USERNAME":
p.opts.adminUsername = parseString(value, defaultAdminUsername)
case "ADMIN_USERNAME_FILE":
p.opts.adminUsername = readSecretFile(value, defaultAdminUsername)
case "ADMIN_PASSWORD":
p.opts.adminPassword = parseString(value, defaultAdminPassword)
case "ADMIN_PASSWORD_FILE":
p.opts.adminPassword = readSecretFile(value, defaultAdminPassword)
case "POCKET_CONSUMER_KEY":
p.opts.pocketConsumerKey = parseString(value, defaultPocketConsumerKey)
case "POCKET_CONSUMER_KEY_FILE":
p.opts.pocketConsumerKey = readSecretFile(value, defaultPocketConsumerKey)
case "OAUTH2_USER_CREATION":
p.opts.oauth2UserCreationAllowed = parseBool(value, defaultOAuth2UserCreation)
case "OAUTH2_CLIENT_ID":
p.opts.oauth2ClientID = parseString(value, defaultOAuth2ClientID)
case "OAUTH2_CLIENT_ID_FILE":
p.opts.oauth2ClientID = readSecretFile(value, defaultOAuth2ClientID)
case "OAUTH2_CLIENT_SECRET":
p.opts.oauth2ClientSecret = parseString(value, defaultOAuth2ClientSecret)
case "OAUTH2_CLIENT_SECRET_FILE":
p.opts.oauth2ClientSecret = readSecretFile(value, defaultOAuth2ClientSecret)
case "OAUTH2_REDIRECT_URL":
p.opts.oauth2RedirectURL = parseString(value, defaultOAuth2RedirectURL)
case "OAUTH2_OIDC_DISCOVERY_ENDPOINT":
p.opts.oidcDiscoveryEndpoint = parseString(value, defaultOAuth2OidcDiscoveryEndpoint)
case "OAUTH2_OIDC_PROVIDER_NAME":
p.opts.oidcProviderName = parseString(value, defaultOauth2OidcProviderName)
case "OAUTH2_PROVIDER":
p.opts.oauth2Provider = parseString(value, defaultOAuth2Provider)
case "DISABLE_LOCAL_AUTH":
p.opts.disableLocalAuth = parseBool(value, defaultDisableLocalAuth)
case "HTTP_CLIENT_TIMEOUT":
p.opts.httpClientTimeout = parseInt(value, defaultHTTPClientTimeout)
case "HTTP_CLIENT_MAX_BODY_SIZE":
p.opts.httpClientMaxBodySize = int64(parseInt(value, defaultHTTPClientMaxBodySize) * 1024 * 1024)
case "HTTP_CLIENT_PROXY":
p.opts.httpClientProxy = parseString(value, defaultHTTPClientProxy)
case "HTTP_CLIENT_USER_AGENT":
p.opts.httpClientUserAgent = parseString(value, defaultHTTPClientUserAgent)
case "HTTP_SERVER_TIMEOUT":
p.opts.httpServerTimeout = parseInt(value, defaultHTTPServerTimeout)
case "AUTH_PROXY_HEADER":
p.opts.authProxyHeader = parseString(value, defaultAuthProxyHeader)
case "AUTH_PROXY_USER_CREATION":
p.opts.authProxyUserCreation = parseBool(value, defaultAuthProxyUserCreation)
case "MAINTENANCE_MODE":
p.opts.maintenanceMode = parseBool(value, defaultMaintenanceMode)
case "MAINTENANCE_MESSAGE":
p.opts.maintenanceMessage = parseString(value, defaultMaintenanceMessage)
case "METRICS_COLLECTOR":
p.opts.metricsCollector = parseBool(value, defaultMetricsCollector)
case "METRICS_REFRESH_INTERVAL":
p.opts.metricsRefreshInterval = parseInt(value, defaultMetricsRefreshInterval)
case "METRICS_ALLOWED_NETWORKS":
p.opts.metricsAllowedNetworks = parseStringList(value, []string{defaultMetricsAllowedNetworks})
case "METRICS_USERNAME":
p.opts.metricsUsername = parseString(value, defaultMetricsUsername)
case "METRICS_USERNAME_FILE":
p.opts.metricsUsername = readSecretFile(value, defaultMetricsUsername)
case "METRICS_PASSWORD":
p.opts.metricsPassword = parseString(value, defaultMetricsPassword)
case "METRICS_PASSWORD_FILE":
p.opts.metricsPassword = readSecretFile(value, defaultMetricsPassword)
case "FETCH_BILIBILI_WATCH_TIME":
p.opts.fetchBilibiliWatchTime = parseBool(value, defaultFetchBilibiliWatchTime)
case "FETCH_NEBULA_WATCH_TIME":
p.opts.fetchNebulaWatchTime = parseBool(value, defaultFetchNebulaWatchTime)
case "FETCH_ODYSEE_WATCH_TIME":
p.opts.fetchOdyseeWatchTime = parseBool(value, defaultFetchOdyseeWatchTime)
case "FETCH_YOUTUBE_WATCH_TIME":
p.opts.fetchYouTubeWatchTime = parseBool(value, defaultFetchYouTubeWatchTime)
case "YOUTUBE_API_KEY":
p.opts.youTubeApiKey = parseString(value, defaultYouTubeApiKey)
case "YOUTUBE_EMBED_URL_OVERRIDE":
p.opts.youTubeEmbedUrlOverride = parseString(value, defaultYouTubeEmbedUrlOverride)
case "WATCHDOG":
p.opts.watchdog = parseBool(value, defaultWatchdog)
case "INVIDIOUS_INSTANCE":
p.opts.invidiousInstance = parseString(value, defaultInvidiousInstance)
case "WEBAUTHN":
p.opts.webAuthn = parseBool(value, defaultWebAuthn)
}
}
if port != "" {
p.opts.listenAddr = ":" + port
}
return nil
}
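// parseBaseURL validates BASE_URL and splits it into the full base URL, the root URL
// (scheme and host only) and the base path. For example, "https://example.org/miniflux/"
// yields ("https://example.org/miniflux", "https://example.org", "/miniflux").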
func parseBaseURL(value string) (string, string, string, error) {
if value == "" {
return defaultBaseURL, defaultRootURL, "", nil
}
if value[len(value)-1:] == "/" {
value = value[:len(value)-1]
}
parsedURL, err := url.Parse(value)
if err != nil {
return "", "", "", fmt.Errorf("config: invalid BASE_URL: %w", err)
}
scheme := strings.ToLower(parsedURL.Scheme)
if scheme != "https" && scheme != "http" {
return "", "", "", errors.New("config: invalid BASE_URL: scheme must be http or https")
}
basePath := parsedURL.Path
parsedURL.Path = ""
return value, parsedURL.String(), basePath, nil
}
func parseBool(value string, fallback bool) bool {
if value == "" {
return fallback
}
value = strings.ToLower(value)
if value == "1" || value == "yes" || value == "true" || value == "on" {
return true
}
return false
}
func parseInt(value string, fallback int) int {
if value == "" {
return fallback
}
v, err := strconv.Atoi(value)
if err != nil {
return fallback
}
return v
}
func parseString(value string, fallback string) string {
if value == "" {
return fallback
}
return value
}
func parseStringList(value string, fallback []string) []string {
if value == "" {
return fallback
}
var strList []string
strMap := make(map[string]bool)
items := strings.Split(value, ",")
for _, item := range items {
itemValue := strings.TrimSpace(item)
if _, found := strMap[itemValue]; !found {
strMap[itemValue] = true
strList = append(strList, itemValue)
}
}
return strList
}
func parseBytes(value string, fallback []byte) []byte {
if value == "" {
return fallback
}
return []byte(value)
}
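// readSecretFile reads a secret from the given file, trimming surrounding whitespace.
// It returns the fallback value when the file cannot be read or is empty.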
func readSecretFile(filename, fallback string) string {
data, err := os.ReadFile(filename)
if err != nil {
return fallback
}
value := string(bytes.TrimSpace(data))
if value == "" {
return fallback
}
return value
}
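// Illustrative usage (a minimal sketch, not part of the original source): the
// parser is normally fed from the environment, and a PORT value replaces
// LISTEN_ADDR with ":<port>" once all lines have been processed.
//
//	parser := NewParser()
//	opts, err := parser.ParseEnvironmentVariables()
//	if err != nil {
//		fmt.Fprintln(os.Stderr, err)
//		return
//	}
//	fmt.Println(opts.WorkerPoolSize())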
v2-2.2.6/internal/config/parser_test.go 0000664 0000000 0000000 00000002722 14756465373 0020042 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package config // import "miniflux.app/v2/internal/config"
import (
"testing"
)
func TestParseBoolValue(t *testing.T) {
scenarios := map[string]bool{
"": true,
"1": true,
"Yes": true,
"yes": true,
"True": true,
"true": true,
"on": true,
"false": false,
"off": false,
"invalid": false,
}
for input, expected := range scenarios {
result := parseBool(input, true)
if result != expected {
t.Errorf(`Unexpected result for %q, got %v instead of %v`, input, result, expected)
}
}
}
func TestParseStringValueWithUnsetVariable(t *testing.T) {
if parseString("", "defaultValue") != "defaultValue" {
t.Errorf(`Unset variables should return the default value`)
}
}
func TestParseStringValue(t *testing.T) {
if parseString("test", "defaultValue") != "test" {
t.Errorf(`Defined variables should return the specified value`)
}
}
func TestParseIntValueWithUnsetVariable(t *testing.T) {
if parseInt("", 42) != 42 {
t.Errorf(`Unset variables should return the default value`)
}
}
func TestParseIntValueWithInvalidInput(t *testing.T) {
if parseInt("invalid integer", 42) != 42 {
t.Errorf(`Invalid integers should return the default value`)
}
}
func TestParseIntValue(t *testing.T) {
if parseInt("2018", 42) != 2018 {
t.Errorf(`Defined variables should return the specified value`)
}
}
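// TestParseStringListValue is an illustrative sketch added for documentation
// purposes (it is not part of the original test suite): parseStringList trims
// whitespace and removes duplicates while preserving the first-seen order.
func TestParseStringListValue(t *testing.T) {
	result := parseStringList(" a, b ,a,c ", []string{"fallback"})
	expected := []string{"a", "b", "c"}
	if len(result) != len(expected) {
		t.Fatalf(`Unexpected number of values, got %d instead of %d`, len(result), len(expected))
	}
	for i := range expected {
		if result[i] != expected[i] {
			t.Errorf(`Unexpected value at index %d, got %q instead of %q`, i, result[i], expected[i])
		}
	}
	if values := parseStringList("", []string{"fallback"}); len(values) != 1 || values[0] != "fallback" {
		t.Errorf(`Empty values should return the fallback list`)
	}
}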
v2-2.2.6/internal/crypto/ 0000775 0000000 0000000 00000000000 14756465373 0015230 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/crypto/crypto.go 0000664 0000000 0000000 00000003234 14756465373 0017101 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package crypto // import "miniflux.app/v2/internal/crypto"
import (
"crypto/hmac"
"crypto/rand"
"crypto/sha256"
"crypto/subtle"
"encoding/base64"
"encoding/hex"
"fmt"
"golang.org/x/crypto/bcrypt"
)
// HashFromBytes returns a SHA-256 checksum of the input.
func HashFromBytes(value []byte) string {
return fmt.Sprintf("%x", sha256.Sum256(value))
}
// Hash returns a SHA-256 checksum of a string.
func Hash(value string) string {
return HashFromBytes([]byte(value))
}
// GenerateRandomBytes returns random bytes.
func GenerateRandomBytes(size int) []byte {
b := make([]byte, size)
if _, err := rand.Read(b); err != nil {
panic(err)
}
return b
}
// GenerateRandomString returns a random string.
func GenerateRandomString(size int) string {
return base64.URLEncoding.EncodeToString(GenerateRandomBytes(size))
}
// GenerateRandomStringHex returns a random hexadecimal string.
func GenerateRandomStringHex(size int) string {
return hex.EncodeToString(GenerateRandomBytes(size))
}
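// HashPassword returns the bcrypt hash of the given password.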
func HashPassword(password string) (string, error) {
bytes, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
return string(bytes), err
}
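// GenerateSHA256Hmac returns the hex-encoded HMAC-SHA256 of data, keyed with the given secret.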
func GenerateSHA256Hmac(secret string, data []byte) string {
h := hmac.New(sha256.New, []byte(secret))
h.Write(data)
return hex.EncodeToString(h.Sum(nil))
}
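// GenerateUUID returns a random UUID-like identifier (16 random bytes formatted as 8-4-4-4-12 hexadecimal groups).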
func GenerateUUID() string {
b := GenerateRandomBytes(16)
return fmt.Sprintf("%X-%X-%X-%X-%X", b[0:4], b[4:6], b[6:8], b[8:10], b[10:])
}
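// ConstantTimeCmp compares two strings in constant time to mitigate timing attacks.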
func ConstantTimeCmp(a, b string) bool {
return subtle.ConstantTimeCompare([]byte(a), []byte(b)) == 1
}
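// Illustrative usage (a minimal sketch, not part of the original source):
//
//	hash, err := HashPassword("secret")
//	if err != nil {
//		panic(err)
//	}
//	_ = hash
//	token := GenerateRandomStringHex(16)       // 32 hexadecimal characters
//	fmt.Println(ConstantTimeCmp(token, token)) // true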
v2-2.2.6/internal/database/ 0000775 0000000 0000000 00000000000 14756465373 0015454 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/database/database.go 0000664 0000000 0000000 00000003356 14756465373 0017556 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package database // import "miniflux.app/v2/internal/database"
import (
"database/sql"
"fmt"
"log/slog"
)
// Migrate executes database migrations.
func Migrate(db *sql.DB) error {
var currentVersion int
db.QueryRow(`SELECT version FROM schema_version`).Scan(&currentVersion)
driver := getDriverStr()
slog.Info("Running database migrations",
slog.Int("current_version", currentVersion),
slog.Int("latest_version", schemaVersion),
slog.String("driver", driver),
)
for version := currentVersion; version < schemaVersion; version++ {
newVersion := version + 1
tx, err := db.Begin()
if err != nil {
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
if err := migrations[version](tx, driver); err != nil {
tx.Rollback()
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
if _, err := tx.Exec(`DELETE FROM schema_version`); err != nil {
tx.Rollback()
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
if _, err := tx.Exec(`INSERT INTO schema_version (version) VALUES ($1)`, newVersion); err != nil {
tx.Rollback()
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
if err := tx.Commit(); err != nil {
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
}
return nil
}
// IsSchemaUpToDate checks if the database schema is up to date.
func IsSchemaUpToDate(db *sql.DB) error {
var currentVersion int
db.QueryRow(`SELECT version FROM schema_version`).Scan(&currentVersion)
if currentVersion < schemaVersion {
return fmt.Errorf(`the database schema is not up to date: current=v%d expected=v%d`, currentVersion, schemaVersion)
}
return nil
}
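// Illustrative usage (a minimal sketch, not part of the original source; the
// pool sizes and connection lifetime below are arbitrary placeholders):
//
//	db, err := NewConnectionPool(dsn, 5, 20, 5*time.Minute)
//	if err != nil {
//		return err
//	}
//	if err := Migrate(db); err != nil {
//		return err
//	}
//	if err := IsSchemaUpToDate(db); err != nil {
//		return err
//	}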
v2-2.2.6/internal/database/migrations.go 0000664 0000000 0000000 00000075317 14756465373 0020174 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package database // import "miniflux.app/v2/internal/database"
import (
"database/sql"
)
var schemaVersion = len(migrations)
// Order is important. Add new migrations at the end of the list.
var migrations = []func(tx *sql.Tx, driver string) error{
func(tx *sql.Tx, _ string) (err error) {
sql := `
CREATE TABLE schema_version (
version text not null
);
CREATE TABLE users (
id serial not null,
username text not null unique,
password text,
is_admin bool default 'f',
language text default 'en_US',
timezone text default 'UTC',
theme text default 'default',
last_login_at timestamp with time zone,
primary key (id)
);
CREATE TABLE sessions (
id serial not null,
user_id int not null,
token text not null unique,
created_at timestamp with time zone default now(),
user_agent text,
ip text,
primary key (id),
unique (user_id, token),
foreign key (user_id) references users(id) on delete cascade
);
CREATE TABLE categories (
id serial not null,
user_id int not null,
title text not null,
primary key (id),
unique (user_id, title),
foreign key (user_id) references users(id) on delete cascade
);
CREATE TABLE feeds (
id bigserial not null,
user_id int not null,
category_id int not null,
title text not null,
feed_url text not null,
site_url text not null,
checked_at timestamp with time zone default now(),
etag_header text default '',
last_modified_header text default '',
parsing_error_msg text default '',
parsing_error_count int default 0,
primary key (id),
unique (user_id, feed_url),
foreign key (user_id) references users(id) on delete cascade,
foreign key (category_id) references categories(id) on delete cascade
);
CREATE TYPE entry_status as enum('unread', 'read', 'removed');
CREATE TABLE entries (
id bigserial not null,
user_id int not null,
feed_id bigint not null,
hash text not null,
published_at timestamp with time zone not null,
title text not null,
url text not null,
author text,
content text,
status entry_status default 'unread',
primary key (id),
unique (feed_id, hash),
foreign key (user_id) references users(id) on delete cascade,
foreign key (feed_id) references feeds(id) on delete cascade
);
CREATE INDEX entries_feed_idx on entries using btree(feed_id);
CREATE TABLE enclosures (
id bigserial not null,
user_id int not null,
entry_id bigint not null,
url text not null,
size int default 0,
mime_type text default '',
primary key (id),
foreign key (user_id) references users(id) on delete cascade,
foreign key (entry_id) references entries(id) on delete cascade
);
CREATE TABLE icons (
id bigserial not null,
hash text not null unique,
mime_type text not null,
content bytea not null,
primary key (id)
);
CREATE TABLE feed_icons (
feed_id bigint not null,
icon_id bigint not null,
primary key(feed_id, icon_id),
foreign key (feed_id) references feeds(id) on delete cascade,
foreign key (icon_id) references icons(id) on delete cascade
);
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, driver string) (err error) {
if driver == "postgresql" {
sql := `
CREATE EXTENSION IF NOT EXISTS hstore;
ALTER TABLE users ADD COLUMN extra hstore;
CREATE INDEX users_extra_idx ON users using gin(extra);
`
_, err = tx.Exec(sql)
return err
}
return nil
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
CREATE TABLE tokens (
id text not null,
value text not null,
created_at timestamp with time zone not null default now(),
primary key(id, value)
);
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
CREATE TYPE entry_sorting_direction AS enum('asc', 'desc');
ALTER TABLE users ADD COLUMN entry_direction entry_sorting_direction default 'asc';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
CREATE TABLE integrations (
user_id int not null,
pinboard_enabled bool default 'f',
pinboard_token text default '',
pinboard_tags text default 'miniflux',
pinboard_mark_as_unread bool default 'f',
instapaper_enabled bool default 'f',
instapaper_username text default '',
instapaper_password text default '',
fever_enabled bool default 'f',
fever_username text default '',
fever_password text default '',
fever_token text default '',
primary key(user_id)
);
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN scraper_rules text default ''`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN rewrite_rules text default ''`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN crawler boolean default 'f'`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE sessions rename to user_sessions`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
DROP TABLE tokens;
CREATE TABLE sessions (
id text not null,
data jsonb not null,
created_at timestamp with time zone not null default now(),
primary key(id)
);
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN wallabag_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN wallabag_url text default '';
ALTER TABLE integrations ADD COLUMN wallabag_client_id text default '';
ALTER TABLE integrations ADD COLUMN wallabag_client_secret text default '';
ALTER TABLE integrations ADD COLUMN wallabag_username text default '';
ALTER TABLE integrations ADD COLUMN wallabag_password text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE entries ADD COLUMN starred bool default 'f'`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
CREATE INDEX entries_user_status_idx ON entries(user_id, status);
CREATE INDEX feeds_user_category_idx ON feeds(user_id, category_id);
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN nunux_keeper_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN nunux_keeper_url text default '';
ALTER TABLE integrations ADD COLUMN nunux_keeper_api_key text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE enclosures ALTER COLUMN size SET DATA TYPE bigint`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE entries ADD COLUMN comments_url text default ''`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN pocket_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN pocket_access_token text default '';
ALTER TABLE integrations ADD COLUMN pocket_consumer_key text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE user_sessions ALTER COLUMN ip SET DATA TYPE inet using ip::inet;
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE feeds ADD COLUMN username text default '';
ALTER TABLE feeds ADD COLUMN password text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE entries ADD COLUMN document_vectors tsvector;
UPDATE entries SET document_vectors = to_tsvector(substring(title || ' ' || coalesce(content, '') for 1000000));
CREATE INDEX document_vectors_idx ON entries USING gin(document_vectors);
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN user_agent text default ''`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
UPDATE
entries
SET
document_vectors = setweight(to_tsvector(substring(coalesce(title, '') for 1000000)), 'A') || setweight(to_tsvector(substring(coalesce(content, '') for 1000000)), 'B')
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN keyboard_shortcuts boolean default 't'`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN disabled boolean default 'f';`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE users ALTER COLUMN theme SET DEFAULT 'light_serif';
UPDATE users SET theme='light_serif' WHERE theme='default';
UPDATE users SET theme='light_sans_serif' WHERE theme='sansserif';
UPDATE users SET theme='dark_serif' WHERE theme='black';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE entries ADD COLUMN changed_at timestamp with time zone;
UPDATE entries SET changed_at = published_at;
ALTER TABLE entries ALTER COLUMN changed_at SET not null;
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
CREATE TABLE api_keys (
id serial not null,
user_id int not null references users(id) on delete cascade,
token text not null unique,
description text not null,
last_used_at timestamp with time zone,
created_at timestamp with time zone default now(),
primary key(id),
unique (user_id, description)
);
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE entries ADD COLUMN share_code text not null default '';
CREATE UNIQUE INDEX entries_share_code_idx ON entries USING btree(share_code) WHERE share_code <> '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `CREATE INDEX enclosures_user_entry_url_idx ON enclosures(user_id, entry_id, md5(url))`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE feeds ADD COLUMN next_check_at timestamp with time zone default now();
CREATE INDEX entries_user_feed_idx ON entries (user_id, feed_id);
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN ignore_http_cache bool default false`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN entries_per_page int default 100`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN show_reading_time boolean default 't'`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `CREATE INDEX entries_id_user_status_idx ON entries USING btree (id, user_id, status)`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN fetch_via_proxy bool default false`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `CREATE INDEX entries_feed_id_status_hash_idx ON entries USING btree (feed_id, status, hash)`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `CREATE INDEX entries_user_id_status_starred_idx ON entries (user_id, status, starred)`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN entry_swipe boolean default 't'`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE integrations DROP COLUMN fever_password`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE feeds
ADD COLUMN blocklist_rules text not null default '',
ADD COLUMN keeplist_rules text not null default ''
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE entries ADD COLUMN reading_time int not null default 0`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE entries ADD COLUMN created_at timestamp with time zone not null default now();
UPDATE entries SET created_at = published_at;
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, driver string) (err error) {
_, err = tx.Exec(`
ALTER TABLE users
ADD column stylesheet text not null default '',
ADD column google_id text not null default '',
ADD column openid_connect_id text not null default ''
`)
if err != nil {
return err
}
if driver == "postgresql" {
_, err = tx.Exec(`
DECLARE my_cursor CURSOR FOR
SELECT
id,
COALESCE(extra->'custom_css', '') as custom_css,
COALESCE(extra->'google_id', '') as google_id,
COALESCE(extra->'oidc_id', '') as oidc_id
FROM users
FOR UPDATE
`)
if err != nil {
return err
}
defer tx.Exec("CLOSE my_cursor")
for {
var (
userID int64
customStylesheet string
googleID string
oidcID string
)
if err := tx.QueryRow(`FETCH NEXT FROM my_cursor`).Scan(&userID, &customStylesheet, &googleID, &oidcID); err != nil {
if err == sql.ErrNoRows {
break
}
return err
}
_, err := tx.Exec(
`UPDATE
users
SET
stylesheet=$2,
google_id=$3,
openid_connect_id=$4
WHERE
id=$1
`,
userID, customStylesheet, googleID, oidcID)
if err != nil {
return err
}
}
}
return err
},
func(tx *sql.Tx, driver string) (err error) {
if driver == "postgresql" {
if _, err = tx.Exec(`ALTER TABLE users DROP COLUMN extra;`); err != nil {
return err
}
}
_, err = tx.Exec(`
CREATE UNIQUE INDEX users_google_id_idx ON users(google_id) WHERE google_id <> '';
CREATE UNIQUE INDEX users_openid_connect_id_idx ON users(openid_connect_id) WHERE openid_connect_id <> '';
`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
CREATE INDEX entries_feed_url_idx ON entries(feed_id, url);
CREATE INDEX entries_user_status_feed_idx ON entries(user_id, status, feed_id);
CREATE INDEX entries_user_status_changed_idx ON entries(user_id, status, changed_at);
`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
CREATE TABLE acme_cache (
key varchar(400) not null primary key,
data bytea not null,
updated_at timestamptz not null
);
`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE feeds ADD COLUMN allow_self_signed_certificates boolean not null default false
`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
CREATE TYPE webapp_display_mode AS enum('fullscreen', 'standalone', 'minimal-ui', 'browser');
ALTER TABLE users ADD COLUMN display_mode webapp_display_mode default 'standalone';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN cookie text default ''`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE categories ADD COLUMN hide_globally boolean not null default false
`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE feeds ADD COLUMN hide_globally boolean not null default false
`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN telegram_bot_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN telegram_bot_token text default '';
ALTER TABLE integrations ADD COLUMN telegram_bot_chat_id text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
CREATE TYPE entry_sorting_order AS enum('published_at', 'created_at');
ALTER TABLE users ADD COLUMN entry_order entry_sorting_order default 'published_at';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN googlereader_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN googlereader_username text default '';
ALTER TABLE integrations ADD COLUMN googlereader_password text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN espial_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN espial_url text default '';
ALTER TABLE integrations ADD COLUMN espial_api_key text default '';
ALTER TABLE integrations ADD COLUMN espial_tags text default 'miniflux';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN linkding_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN linkding_url text default '';
ALTER TABLE integrations ADD COLUMN linkding_api_key text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE feeds ADD COLUMN url_rewrite_rules text not null default ''
`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE users ADD COLUMN default_reading_speed int default 265;
ALTER TABLE users ADD COLUMN cjk_reading_speed int default 500;
`)
return
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE users ADD COLUMN default_home_page text default 'unread';
`)
return
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE integrations ADD COLUMN wallabag_only_url bool default 'f';
`)
return
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE users ADD COLUMN categories_sorting_order text not null default 'unread_count';
`)
return
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN matrix_bot_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN matrix_bot_user text default '';
ALTER TABLE integrations ADD COLUMN matrix_bot_password text default '';
ALTER TABLE integrations ADD COLUMN matrix_bot_url text default '';
ALTER TABLE integrations ADD COLUMN matrix_bot_chat_id text default '';
`
_, err = tx.Exec(sql)
return
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN double_tap boolean default 't'`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE entries ADD COLUMN tags text[] default '{}';
`)
return
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE users RENAME double_tap TO gesture_nav;
ALTER TABLE users ALTER COLUMN gesture_nav SET DATA TYPE text using case when gesture_nav = true then 'tap' when gesture_nav = false then 'none' end;
ALTER TABLE users ALTER COLUMN gesture_nav SET default 'tap';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN linkding_tags text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE feeds ADD COLUMN no_media_player boolean default 'f';
ALTER TABLE enclosures ADD COLUMN media_progression int default 0;
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN linkding_mark_as_unread bool default 'f';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
// Delete duplicated rows
sql := `
DELETE FROM enclosures a USING enclosures b
WHERE a.id < b.id
AND a.user_id = b.user_id
AND a.entry_id = b.entry_id
AND a.url = b.url;
`
_, err = tx.Exec(sql)
if err != nil {
return err
}
// Remove previous index
_, err = tx.Exec(`DROP INDEX enclosures_user_entry_url_idx`)
if err != nil {
return err
}
// Create unique index
_, err = tx.Exec(`CREATE UNIQUE INDEX enclosures_user_entry_url_unique_idx ON enclosures(user_id, entry_id, md5(url))`)
if err != nil {
return err
}
return nil
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN mark_read_on_view boolean default 't'`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN notion_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN notion_token text default '';
ALTER TABLE integrations ADD COLUMN notion_page_id text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN readwise_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN readwise_api_key text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN apprise_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN apprise_url text default '';
ALTER TABLE integrations ADD COLUMN apprise_services_url text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN shiori_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN shiori_url text default '';
ALTER TABLE integrations ADD COLUMN shiori_username text default '';
ALTER TABLE integrations ADD COLUMN shiori_password text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN shaarli_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN shaarli_url text default '';
ALTER TABLE integrations ADD COLUMN shaarli_api_secret text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
ALTER TABLE feeds ADD COLUMN apprise_service_urls text default '';
`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN webhook_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN webhook_url text default '';
ALTER TABLE integrations ADD COLUMN webhook_secret text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN telegram_bot_topic_id int;
ALTER TABLE integrations ADD COLUMN telegram_bot_disable_web_page_preview bool default 'f';
ALTER TABLE integrations ADD COLUMN telegram_bot_disable_notification bool default 'f';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN telegram_bot_disable_buttons bool default 'f';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
-- Speed up has_enclosure
CREATE INDEX enclosures_entry_id_idx ON enclosures(entry_id);
-- Speed up unread page
CREATE INDEX entries_user_status_published_idx ON entries(user_id, status, published_at);
CREATE INDEX entries_user_status_created_idx ON entries(user_id, status, created_at);
CREATE INDEX feeds_feed_id_hide_globally_idx ON feeds(id, hide_globally);
-- Speed up history page
CREATE INDEX entries_user_status_changed_published_idx ON entries(user_id, status, changed_at, published_at);
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN rssbridge_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN rssbridge_url text default '';
`
_, err = tx.Exec(sql)
return
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`
CREATE TABLE webauthn_credentials (
handle bytea primary key,
cred_id bytea unique not null,
user_id int references users(id) on delete cascade not null,
public_key bytea not null,
attestation_type varchar(255) not null,
aaguid bytea,
sign_count bigint,
clone_warning bool,
name text,
added_on timestamp with time zone default now(),
last_seen_on timestamp with time zone default now()
);
`)
return
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN omnivore_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN omnivore_api_key text default '';
ALTER TABLE integrations ADD COLUMN omnivore_url text default '';
`
_, err = tx.Exec(sql)
return
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN linkace_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN linkace_url text default '';
ALTER TABLE integrations ADD COLUMN linkace_api_key text default '';
ALTER TABLE integrations ADD COLUMN linkace_tags text default '';
ALTER TABLE integrations ADD COLUMN linkace_is_private bool default 't';
ALTER TABLE integrations ADD COLUMN linkace_check_disabled bool default 't';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN linkwarden_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN linkwarden_url text default '';
ALTER TABLE integrations ADD COLUMN linkwarden_api_key text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN readeck_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN readeck_only_url bool default 'f';
ALTER TABLE integrations ADD COLUMN readeck_url text default '';
ALTER TABLE integrations ADD COLUMN readeck_api_key text default '';
ALTER TABLE integrations ADD COLUMN readeck_labels text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN disable_http2 bool default 'f'`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN media_playback_rate numeric default 1;`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
// The WHERE clause speeds up the request a lot.
sql := `UPDATE entries SET tags = array_remove(tags, '') WHERE '' = ANY(tags);`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
// Entry URLs can exceed the btree maximum size.
// Checking entry existence now uses the entries_feed_id_status_hash_idx index.
_, err = tx.Exec(`DROP INDEX entries_feed_url_idx`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN raindrop_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN raindrop_token text default '';
ALTER TABLE integrations ADD COLUMN raindrop_collection_id text default '';
ALTER TABLE integrations ADD COLUMN raindrop_tags text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE feeds ADD COLUMN description text default ''`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE users
ADD COLUMN block_filter_entry_rules text not null default '',
ADD COLUMN keep_filter_entry_rules text not null default ''
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN betula_url text default '';
ALTER TABLE integrations ADD COLUMN betula_token text default '';
ALTER TABLE integrations ADD COLUMN betula_enabled bool default 'f';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN ntfy_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN ntfy_url text default '';
ALTER TABLE integrations ADD COLUMN ntfy_topic text default '';
ALTER TABLE integrations ADD COLUMN ntfy_api_token text default '';
ALTER TABLE integrations ADD COLUMN ntfy_username text default '';
ALTER TABLE integrations ADD COLUMN ntfy_password text default '';
ALTER TABLE integrations ADD COLUMN ntfy_icon_url text default '';
ALTER TABLE feeds ADD COLUMN ntfy_enabled bool default 'f';
ALTER TABLE feeds ADD COLUMN ntfy_priority int default '3';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN mark_read_on_media_player_completion bool default 'f';`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN custom_js text not null default '';`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE users ADD COLUMN external_font_hosts text not null default '';`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN cubox_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN cubox_api_link text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN discord_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN discord_webhook_link text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `ALTER TABLE integrations ADD COLUMN ntfy_internal_links bool default 'f';`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN slack_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN slack_webhook_link text default '';
`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx, _ string) (err error) {
_, err = tx.Exec(`ALTER TABLE feeds ADD COLUMN webhook_url text default '';`)
return err
},
func(tx *sql.Tx, _ string) (err error) {
sql := `
ALTER TABLE integrations ADD COLUMN pushover_enabled bool default 'f';
ALTER TABLE integrations ADD COLUMN pushover_user text default '';
ALTER TABLE integrations ADD COLUMN pushover_token text default '';
ALTER TABLE integrations ADD COLUMN pushover_device text default '';
ALTER TABLE integrations ADD COLUMN pushover_prefix text default '';
ALTER TABLE feeds ADD COLUMN pushover_enabled bool default 'f';
ALTER TABLE feeds ADD COLUMN pushover_priority int default '0';
`
_, err = tx.Exec(sql)
return err
},
}
v2-2.2.6/internal/database/postgresql.go 0000664 0000000 0000000 00000001312 14756465373 0020203 0 ustar 00root root 0000000 0000000 //go:build !sqlite
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package database // import "miniflux.app/v2/internal/database"
import (
"database/sql"
"time"
_ "github.com/lib/pq"
)
// NewConnectionPool configures the database connection pool.
func NewConnectionPool(dsn string, minConnections, maxConnections int, connectionLifetime time.Duration) (*sql.DB, error) {
db, err := sql.Open("postgres", dsn)
if err != nil {
return nil, err
}
db.SetMaxOpenConns(maxConnections)
db.SetMaxIdleConns(minConnections)
db.SetConnMaxLifetime(connectionLifetime)
return db, nil
}
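// Illustrative usage sketch (not from the original source); the DSN and the
// pool sizes below are placeholder values:
//
//	db, err := NewConnectionPool(
//		"user=miniflux password=secret dbname=miniflux2 sslmode=disable",
//		5,             // minimum idle connections
//		20,            // maximum open connections
//		5*time.Minute, // connection lifetime
//	)
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer db.Close()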
func getDriverStr() string {
return "postgresql"
}
v2-2.2.6/internal/database/sqlite.go 0000664 0000000 0000000 00000001057 14756465373 0017307 0 ustar 00root root 0000000 0000000 //go:build sqlite
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package database // import "miniflux.app/v2/internal/database"
import (
"database/sql"
"time"
_ "github.com/mattn/go-sqlite3"
)
// NewConnectionPool configures the database connection pool.
func NewConnectionPool(dsn string, _, _ int, _ time.Duration) (*sql.DB, error) {
db, err := sql.Open("sqlite3", dsn)
if err != nil {
return nil, err
}
return db, nil
}
func getDriverStr() string {
return "sqlite3"
}
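// This file is compiled only when the "sqlite" build tag is set, while
// postgresql.go carries the "!sqlite" constraint. A minimal sketch of
// selecting the SQLite driver at build time:
//
//	go build -tags sqlite ./...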
v2-2.2.6/internal/fever/ 0000775 0000000 0000000 00000000000 14756465373 0015017 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/fever/handler.go 0000664 0000000 0000000 00000036122 14756465373 0016767 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package fever // import "miniflux.app/v2/internal/fever"
import (
"log/slog"
"net/http"
"strconv"
"strings"
"time"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/integration"
"miniflux.app/v2/internal/mediaproxy"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/storage"
"github.com/gorilla/mux"
)
// Serve handles Fever API calls.
func Serve(router *mux.Router, store *storage.Storage) {
handler := &handler{store, router}
sr := router.PathPrefix("/fever").Subrouter()
sr.Use(newMiddleware(store).serve)
sr.HandleFunc("/", handler.serve).Name("feverEndpoint")
}
type handler struct {
store *storage.Storage
router *mux.Router
}
func (h *handler) serve(w http.ResponseWriter, r *http.Request) {
switch {
case request.HasQueryParam(r, "groups"):
h.handleGroups(w, r)
case request.HasQueryParam(r, "feeds"):
h.handleFeeds(w, r)
case request.HasQueryParam(r, "favicons"):
h.handleFavicons(w, r)
case request.HasQueryParam(r, "unread_item_ids"):
h.handleUnreadItems(w, r)
case request.HasQueryParam(r, "saved_item_ids"):
h.handleSavedItems(w, r)
case request.HasQueryParam(r, "items"):
h.handleItems(w, r)
case r.FormValue("mark") == "item":
h.handleWriteItems(w, r)
case r.FormValue("mark") == "feed":
h.handleWriteFeeds(w, r)
case r.FormValue("mark") == "group":
h.handleWriteGroups(w, r)
default:
json.OK(w, r, newBaseResponse())
}
}
/*
A request with the groups argument will return two additional members:
groups contains an array of group objects
feeds_groups contains an array of feeds_group objects
A group object has the following members:
id (positive integer)
title (utf-8 string)
The feeds_group object is documented under “Feeds/Groups Relationships.”
The “Kindling” super group is not included in this response and is composed of all feeds with
an is_spark equal to 0.
The “Sparks” super group is not included in this response and is composed of all feeds with an
is_spark equal to 1.
*/
func (h *handler) handleGroups(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
slog.Debug("[Fever] Fetching groups",
slog.Int64("user_id", userID),
)
categories, err := h.store.Categories(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
feeds, err := h.store.Feeds(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
var result groupsResponse
for _, category := range categories {
result.Groups = append(result.Groups, group{ID: category.ID, Title: category.Title})
}
result.FeedsGroups = h.buildFeedGroups(feeds)
result.SetCommonValues()
json.OK(w, r, result)
}
/*
A request with the feeds argument will return two additional members:
feeds contains an array of group objects
feeds_groups contains an array of feeds_group objects
A feed object has the following members:
id (positive integer)
favicon_id (positive integer)
title (utf-8 string)
url (utf-8 string)
site_url (utf-8 string)
is_spark (boolean integer)
last_updated_on_time (Unix timestamp/integer)
The feeds_group object is documented under “Feeds/Groups Relationships.”
The “All Items” super feed is not included in this response and is composed of all items from all feeds
that belong to a given group. For the “Kindling” super group and all user created groups the items
should be limited to feeds with an is_spark equal to 0.
For the “Sparks” super group the items should be limited to feeds with an is_spark equal to 1.
*/
func (h *handler) handleFeeds(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
slog.Debug("[Fever] Fetching feeds",
slog.Int64("user_id", userID),
)
feeds, err := h.store.Feeds(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
var result feedsResponse
result.Feeds = make([]feed, 0)
for _, f := range feeds {
subscription := feed{
ID: f.ID,
Title: f.Title,
URL: f.FeedURL,
SiteURL: f.SiteURL,
IsSpark: 0,
LastUpdated: f.CheckedAt.Unix(),
}
if f.Icon != nil {
subscription.FaviconID = f.Icon.IconID
}
result.Feeds = append(result.Feeds, subscription)
}
result.FeedsGroups = h.buildFeedGroups(feeds)
result.SetCommonValues()
json.OK(w, r, result)
}
/*
A request with the favicons argument will return one additional member:
favicons contains an array of favicon objects
A favicon object has the following members:
id (positive integer)
data (base64 encoded image data; prefixed by image type)
An example data value:
image/gif;base64,R0lGODlhAQABAIAAAObm5gAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==
The data member of a favicon object can be used with the data: protocol to embed an image in CSS or HTML.
A PHP/HTML example:
echo '';
*/
func (h *handler) handleFavicons(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
slog.Debug("[Fever] Fetching favicons",
slog.Int64("user_id", userID),
)
icons, err := h.store.Icons(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
var result faviconsResponse
for _, i := range icons {
result.Favicons = append(result.Favicons, favicon{
ID: i.ID,
Data: i.DataURL(),
})
}
result.SetCommonValues()
json.OK(w, r, result)
}
/*
A request with the items argument will return two additional members:
items contains an array of item objects
total_items contains the total number of items stored in the database (added in API version 2)
An item object has the following members:
id (positive integer)
feed_id (positive integer)
title (utf-8 string)
author (utf-8 string)
html (utf-8 string)
url (utf-8 string)
is_saved (boolean integer)
is_read (boolean integer)
created_on_time (Unix timestamp/integer)
Most servers won’t have enough memory allocated to PHP to dump all items at once.
Three optional arguments control which items are included in the response.
Use the since_id argument with the highest id of locally cached items to request 50 additional items.
Repeat until the items array in the response is empty.
Use the max_id argument with the lowest id of locally cached items (or 0 initially) to request 50 previous items.
Repeat until the items array in the response is empty. (added in API version 2)
Use the with_ids argument with a comma-separated list of item ids to request (a maximum of 50) specific items.
(added in API version 2)
*/
func (h *handler) handleItems(w http.ResponseWriter, r *http.Request) {
var result itemsResponse
userID := request.UserID(r)
builder := h.store.NewEntryQueryBuilder(userID)
builder.WithoutStatus(model.EntryStatusRemoved)
builder.WithLimit(50)
switch {
case request.HasQueryParam(r, "since_id"):
sinceID := request.QueryInt64Param(r, "since_id", 0)
if sinceID > 0 {
slog.Debug("[Fever] Fetching items since a given date",
slog.Int64("user_id", userID),
slog.Int64("since_id", sinceID),
)
builder.AfterEntryID(sinceID)
builder.WithSorting("id", "ASC")
}
case request.HasQueryParam(r, "max_id"):
maxID := request.QueryInt64Param(r, "max_id", 0)
if maxID == 0 {
slog.Debug("[Fever] Fetching most recent items",
slog.Int64("user_id", userID),
)
builder.WithSorting("id", "DESC")
} else if maxID > 0 {
slog.Debug("[Fever] Fetching items before a given item ID",
slog.Int64("user_id", userID),
slog.Int64("max_id", maxID),
)
builder.BeforeEntryID(maxID)
builder.WithSorting("id", "DESC")
}
case request.HasQueryParam(r, "with_ids"):
csvItemIDs := request.QueryStringParam(r, "with_ids", "")
if csvItemIDs != "" {
var itemIDs []int64
for _, strItemID := range strings.Split(csvItemIDs, ",") {
strItemID = strings.TrimSpace(strItemID)
itemID, _ := strconv.ParseInt(strItemID, 10, 64)
itemIDs = append(itemIDs, itemID)
}
builder.WithEntryIDs(itemIDs)
}
default:
slog.Debug("[Fever] Fetching oldest items",
slog.Int64("user_id", userID),
)
}
entries, err := builder.GetEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
builder = h.store.NewEntryQueryBuilder(userID)
builder.WithoutStatus(model.EntryStatusRemoved)
result.Total, err = builder.CountEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
result.Items = make([]item, 0)
for _, entry := range entries {
isRead := 0
if entry.Status == model.EntryStatusRead {
isRead = 1
}
isSaved := 0
if entry.Starred {
isSaved = 1
}
result.Items = append(result.Items, item{
ID: entry.ID,
FeedID: entry.FeedID,
Title: entry.Title,
Author: entry.Author,
HTML: mediaproxy.RewriteDocumentWithAbsoluteProxyURL(h.router, entry.Content),
URL: entry.URL,
IsSaved: isSaved,
IsRead: isRead,
CreatedAt: entry.Date.Unix(),
})
}
result.SetCommonValues()
json.OK(w, r, result)
}
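// Illustrative requests for the pagination arguments handled above (the IDs
// are placeholders):
//
//	GET /fever/?api&items&since_id=1200  -> up to 50 entries with id > 1200, oldest first
//	GET /fever/?api&items&max_id=1200    -> up to 50 entries with id < 1200, newest first
//	GET /fever/?api&items&with_ids=1,2,3 -> only the listed entries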
/*
The unread_item_ids and saved_item_ids arguments can be used to keep your local cache synced
with the remote Fever installation.
A request with the unread_item_ids argument will return one additional member:
unread_item_ids (string/comma-separated list of positive integers)
*/
func (h *handler) handleUnreadItems(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
slog.Debug("[Fever] Fetching unread items",
slog.Int64("user_id", userID),
)
builder := h.store.NewEntryQueryBuilder(userID)
builder.WithStatus(model.EntryStatusUnread)
rawEntryIDs, err := builder.GetEntryIDs()
if err != nil {
json.ServerError(w, r, err)
return
}
var itemIDs []string
for _, entryID := range rawEntryIDs {
itemIDs = append(itemIDs, strconv.FormatInt(entryID, 10))
}
var result unreadResponse
result.ItemIDs = strings.Join(itemIDs, ",")
result.SetCommonValues()
json.OK(w, r, result)
}
/*
The unread_item_ids and saved_item_ids arguments can be used to keep your local cache synced
with the remote Fever installation.
A request with the saved_item_ids argument will return one additional member:
saved_item_ids (string/comma-separated list of positive integers)
*/
func (h *handler) handleSavedItems(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
slog.Debug("[Fever] Fetching saved items",
slog.Int64("user_id", userID),
)
builder := h.store.NewEntryQueryBuilder(userID)
builder.WithStarred(true)
entryIDs, err := builder.GetEntryIDs()
if err != nil {
json.ServerError(w, r, err)
return
}
var itemsIDs []string
for _, entryID := range entryIDs {
itemsIDs = append(itemsIDs, strconv.FormatInt(entryID, 10))
}
result := &savedResponse{ItemIDs: strings.Join(itemsIDs, ",")}
result.SetCommonValues()
json.OK(w, r, result)
}
/*
mark=item
as=? where ? is replaced with read, saved or unsaved
id=? where ? is replaced with the id of the item to modify
*/
func (h *handler) handleWriteItems(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
slog.Debug("[Fever] Receiving mark=item call",
slog.Int64("user_id", userID),
)
entryID := request.FormInt64Value(r, "id")
if entryID <= 0 {
return
}
builder := h.store.NewEntryQueryBuilder(userID)
builder.WithEntryID(entryID)
builder.WithoutStatus(model.EntryStatusRemoved)
entry, err := builder.GetEntry()
if err != nil {
json.ServerError(w, r, err)
return
}
if entry == nil {
slog.Debug("[Fever] Entry not found",
slog.Int64("user_id", userID),
slog.Int64("entry_id", entryID),
)
json.OK(w, r, newBaseResponse())
return
}
switch r.FormValue("as") {
case "read":
slog.Debug("[Fever] Mark entry as read",
slog.Int64("user_id", userID),
slog.Int64("entry_id", entryID),
)
h.store.SetEntriesStatus(userID, []int64{entryID}, model.EntryStatusRead)
case "unread":
slog.Debug("[Fever] Mark entry as unread",
slog.Int64("user_id", userID),
slog.Int64("entry_id", entryID),
)
h.store.SetEntriesStatus(userID, []int64{entryID}, model.EntryStatusUnread)
case "saved":
slog.Debug("[Fever] Mark entry as saved",
slog.Int64("user_id", userID),
slog.Int64("entry_id", entryID),
)
if err := h.store.ToggleBookmark(userID, entryID); err != nil {
json.ServerError(w, r, err)
return
}
settings, err := h.store.Integration(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
go func() {
integration.SendEntry(entry, settings)
}()
case "unsaved":
slog.Debug("[Fever] Mark entry as unsaved",
slog.Int64("user_id", userID),
slog.Int64("entry_id", entryID),
)
if err := h.store.ToggleBookmark(userID, entryID); err != nil {
json.ServerError(w, r, err)
return
}
}
json.OK(w, r, newBaseResponse())
}
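// Illustrative mark=item calls handled above, sent as form data to the Fever
// endpoint (the entry ID is a placeholder):
//
//	mark=item&as=read&id=123    -> entry 123 is marked as read
//	mark=item&as=saved&id=123   -> entry 123 is bookmarked and pushed to the configured integrations
//	mark=item&as=unsaved&id=123 -> the bookmark on entry 123 is toggled off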
/*
mark=feed
as=read
id=? where ? is replaced with the id of the feed or group to modify
before=? where ? is replaced with the Unix timestamp of the local client’s most recent items API request
*/
func (h *handler) handleWriteFeeds(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
feedID := request.FormInt64Value(r, "id")
before := time.Unix(request.FormInt64Value(r, "before"), 0)
slog.Debug("[Fever] Mark feed as read before a given date",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
slog.Time("before_ts", before),
)
if feedID <= 0 {
return
}
go func() {
if err := h.store.MarkFeedAsRead(userID, feedID, before); err != nil {
slog.Error("[Fever] Unable to mark feed as read",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
slog.Time("before_ts", before),
slog.Any("error", err),
)
}
}()
json.OK(w, r, newBaseResponse())
}
/*
mark=group
as=read
id=? where ? is replaced with the id of the feed or group to modify
before=? where ? is replaced with the Unix timestamp of the local client’s most recent items API request
*/
func (h *handler) handleWriteGroups(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
groupID := request.FormInt64Value(r, "id")
before := time.Unix(request.FormInt64Value(r, "before"), 0)
slog.Debug("[Fever] Mark group as read before a given date",
slog.Int64("user_id", userID),
slog.Int64("group_id", groupID),
slog.Time("before_ts", before),
)
if groupID < 0 {
return
}
go func() {
var err error
if groupID == 0 {
err = h.store.MarkAllAsRead(userID)
} else {
err = h.store.MarkCategoryAsRead(userID, groupID, before)
}
if err != nil {
slog.Error("[Fever] Unable to mark group as read",
slog.Int64("user_id", userID),
slog.Int64("group_id", groupID),
slog.Time("before_ts", before),
slog.Any("error", err),
)
}
}()
json.OK(w, r, newBaseResponse())
}
/*
A feeds_group object has the following members:
group_id (positive integer)
feed_ids (string/comma-separated list of positive integers)
*/
func (h *handler) buildFeedGroups(feeds model.Feeds) []feedsGroups {
feedsGroupedByCategory := make(map[int64][]string)
for _, feed := range feeds {
feedsGroupedByCategory[feed.Category.ID] = append(feedsGroupedByCategory[feed.Category.ID], strconv.FormatInt(feed.ID, 10))
}
result := make([]feedsGroups, 0)
for categoryID, feedIDs := range feedsGroupedByCategory {
result = append(result, feedsGroups{
GroupID: categoryID,
FeedIDs: strings.Join(feedIDs, ","),
})
}
return result
}
v2-2.2.6/internal/fever/middleware.go 0000664 0000000 0000000 00000004266 14756465373 0017473 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package fever // import "miniflux.app/v2/internal/fever"
import (
"context"
"log/slog"
"net/http"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/storage"
)
type middleware struct {
store *storage.Storage
}
func newMiddleware(s *storage.Storage) *middleware {
return &middleware{s}
}
func (m *middleware) serve(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
clientIP := request.ClientIP(r)
apiKey := r.FormValue("api_key")
if apiKey == "" {
slog.Warn("[Fever] No API key provided",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
json.OK(w, r, newAuthFailureResponse())
return
}
user, err := m.store.UserByFeverToken(apiKey)
if err != nil {
slog.Error("[Fever] Unable to fetch user by API key",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Any("error", err),
)
json.OK(w, r, newAuthFailureResponse())
return
}
if user == nil {
slog.Warn("[Fever] No user found with the API key provided",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
json.OK(w, r, newAuthFailureResponse())
return
}
slog.Info("[Fever] User authenticated successfully",
slog.Bool("authentication_successful", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", user.ID),
slog.String("username", user.Username),
)
m.store.SetLastLogin(user.ID)
ctx := r.Context()
ctx = context.WithValue(ctx, request.UserIDContextKey, user.ID)
ctx = context.WithValue(ctx, request.UserTimezoneContextKey, user.Timezone)
ctx = context.WithValue(ctx, request.IsAdminUserContextKey, user.IsAdmin)
ctx = context.WithValue(ctx, request.IsAuthenticatedContextKey, true)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
v2-2.2.6/internal/fever/response.go 0000664 0000000 0000000 00000005404 14756465373 0017207 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package fever // import "miniflux.app/v2/internal/fever"
import (
"time"
)
type baseResponse struct {
Version int `json:"api_version"`
Authenticated int `json:"auth"`
LastRefresh int64 `json:"last_refreshed_on_time"`
}
func (b *baseResponse) SetCommonValues() {
b.Version = 3
b.Authenticated = 1
b.LastRefresh = time.Now().Unix()
}
/*
The default response is a JSON object containing two members:
api_version contains the version of the API responding (positive integer)
auth whether the request was successfully authenticated (boolean integer)
The API can also return XML by passing xml as the optional value of the api argument like so:
http://yourdomain.com/fever/?api=xml
The top level XML element is named response.
The response to each successfully authenticated request will have auth set to 1 and include
at least one additional member:
last_refreshed_on_time contains the time of the most recently refreshed (not updated)
feed (Unix timestamp/integer)
*/
func newBaseResponse() baseResponse {
r := baseResponse{}
r.SetCommonValues()
return r
}
func newAuthFailureResponse() baseResponse {
return baseResponse{Version: 3, Authenticated: 0}
}
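// For reference, a successful base response serializes to JSON along these
// lines (the timestamp is a placeholder):
//
//	{"api_version": 3, "auth": 1, "last_refreshed_on_time": 1700000000}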
type groupsResponse struct {
baseResponse
Groups []group `json:"groups"`
FeedsGroups []feedsGroups `json:"feeds_groups"`
}
type feedsResponse struct {
baseResponse
Feeds []feed `json:"feeds"`
FeedsGroups []feedsGroups `json:"feeds_groups"`
}
type faviconsResponse struct {
baseResponse
Favicons []favicon `json:"favicons"`
}
type itemsResponse struct {
baseResponse
Items []item `json:"items"`
Total int `json:"total_items"`
}
type unreadResponse struct {
baseResponse
ItemIDs string `json:"unread_item_ids"`
}
type savedResponse struct {
baseResponse
ItemIDs string `json:"saved_item_ids"`
}
type group struct {
ID int64 `json:"id"`
Title string `json:"title"`
}
type feedsGroups struct {
GroupID int64 `json:"group_id"`
FeedIDs string `json:"feed_ids"`
}
type feed struct {
ID int64 `json:"id"`
FaviconID int64 `json:"favicon_id"`
Title string `json:"title"`
URL string `json:"url"`
SiteURL string `json:"site_url"`
IsSpark int `json:"is_spark"`
LastUpdated int64 `json:"last_updated_on_time"`
}
type item struct {
ID int64 `json:"id"`
FeedID int64 `json:"feed_id"`
Title string `json:"title"`
Author string `json:"author"`
HTML string `json:"html"`
URL string `json:"url"`
IsSaved int `json:"is_saved"`
IsRead int `json:"is_read"`
CreatedAt int64 `json:"created_on_time"`
}
type favicon struct {
ID int64 `json:"id"`
Data string `json:"data"`
}
v2-2.2.6/internal/googlereader/ 0000775 0000000 0000000 00000000000 14756465373 0016347 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/googlereader/handler.go 0000664 0000000 0000000 00000124056 14756465373 0020323 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package googlereader // import "miniflux.app/v2/internal/googlereader"
import (
"errors"
"fmt"
"log/slog"
"net/http"
"strconv"
"strings"
"time"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/http/route"
"miniflux.app/v2/internal/integration"
"miniflux.app/v2/internal/mediaproxy"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/fetcher"
mff "miniflux.app/v2/internal/reader/handler"
mfs "miniflux.app/v2/internal/reader/subscription"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/validator"
"github.com/gorilla/mux"
)
type handler struct {
store *storage.Storage
router *mux.Router
}
const (
// StreamPrefix is the prefix for streams (read/starred/reading list and so on)
StreamPrefix = "user/-/state/com.google/"
// UserStreamPrefix is the user-specific prefix for streams (read/starred/reading list and so on)
UserStreamPrefix = "user/%d/state/com.google/"
// LabelPrefix is the prefix for a label stream
LabelPrefix = "user/-/label/"
// UserLabelPrefix is the user-specific prefix for a label stream
UserLabelPrefix = "user/%d/label/"
// FeedPrefix is the prefix for a feed stream
FeedPrefix = "feed/"
// Read is the suffix for read stream
Read = "read"
// Starred is the suffix for starred stream
Starred = "starred"
// ReadingList is the suffix for reading list stream
ReadingList = "reading-list"
// KeptUnread is the suffix for kept unread stream
KeptUnread = "kept-unread"
// Broadcast is the suffix for broadcast stream
Broadcast = "broadcast"
// BroadcastFriends is the suffix for broadcast friends stream
BroadcastFriends = "broadcast-friends"
// Like is the suffix for like stream
Like = "like"
// EntryIDLong is the long entry id representation
EntryIDLong = "tag:google.com,2005:reader/item/%016x"
)
const (
// ParamItemIDs - name of the parameter with the item ids
ParamItemIDs = "i"
// ParamStreamID - name of the parameter containing the stream to be included
ParamStreamID = "s"
// ParamStreamExcludes - name of the parameter containing streams to be excluded
ParamStreamExcludes = "xt"
// ParamStreamFilters - name of the parameter containing streams to be included
ParamStreamFilters = "it"
// ParamStreamMaxItems - name of the parameter containing number of items per page/max items returned
ParamStreamMaxItems = "n"
// ParamStreamOrder - name of the parameter containing the sort criteria
ParamStreamOrder = "r"
// ParamStreamStartTime - name of the parameter containing epoch timestamp, filtering items older than
ParamStreamStartTime = "ot"
// ParamStreamStopTime - name of the parameter containing epoch timestamp, filtering items newer than
ParamStreamStopTime = "nt"
// ParamTagsRemove - name of the parameter containing tags (streams) to be removed
ParamTagsRemove = "r"
// ParamTagsAdd - name of the parameter containing tags (streams) to be added
ParamTagsAdd = "a"
// ParamSubscribeAction - name of the parameter indicating the action to take for subscription/edit
ParamSubscribeAction = "ac"
// ParamTitle - name of the parameter for the title of the subscription
ParamTitle = "t"
// ParamQuickAdd - name of the parameter for a URL being quick subscribed to
ParamQuickAdd = "quickadd"
// ParamDestination - name of the parameter for the new name of a tag
ParamDestination = "dest"
// ParamContinuation - name of the parameter for callers to pass to receive the next page of results
ParamContinuation = "c"
)
// StreamType represents the possible stream types
type StreamType int
const (
// NoStream - no stream type
NoStream StreamType = iota
// ReadStream - read stream type
ReadStream
// StarredStream - starred stream type
StarredStream
// ReadingListStream - reading list stream type
ReadingListStream
// KeptUnreadStream - kept unread stream type
KeptUnreadStream
// BroadcastStream - broadcast stream type
BroadcastStream
// BroadcastFriendsStream - broadcast friends stream type
BroadcastFriendsStream
// LabelStream - label stream type
LabelStream
// FeedStream - feed stream type
FeedStream
// LikeStream - like stream type
LikeStream
)
// Stream defines a stream type and its ID.
type Stream struct {
Type StreamType
ID string
}
func (s Stream) String() string {
return fmt.Sprintf("%v - '%s'", s.Type, s.ID)
}
func (st StreamType) String() string {
switch st {
case NoStream:
return "NoStream"
case ReadStream:
return "ReadStream"
case StarredStream:
return "StarredStream"
case ReadingListStream:
return "ReadingListStream"
case KeptUnreadStream:
return "KeptUnreadStream"
case BroadcastStream:
return "BroadcastStream"
case BroadcastFriendsStream:
return "BroadcastFriendsStream"
case LabelStream:
return "LabelStream"
case FeedStream:
return "FeedStream"
case LikeStream:
return "LikeStream"
default:
// Avoid recursing into String() for unknown values.
return fmt.Sprintf("StreamType(%d)", int(st))
}
}
// RequestModifiers are the parsed request parameters.
type RequestModifiers struct {
ExcludeTargets []Stream
FilterTargets []Stream
Streams []Stream
Count int
Offset int
SortDirection string
StartTime int64
StopTime int64
ContinuationToken string
UserID int64
}
func (r RequestModifiers) String() string {
var results []string
results = append(results, fmt.Sprintf("UserID: %d", r.UserID))
var streamStr []string
for _, s := range r.Streams {
streamStr = append(streamStr, s.String())
}
results = append(results, fmt.Sprintf("Streams: [%s]", strings.Join(streamStr, ", ")))
var exclusions []string
for _, s := range r.ExcludeTargets {
exclusions = append(exclusions, s.String())
}
results = append(results, fmt.Sprintf("Exclusions: [%s]", strings.Join(exclusions, ", ")))
var filters []string
for _, s := range r.FilterTargets {
filters = append(filters, s.String())
}
results = append(results, fmt.Sprintf("Filters: [%s]", strings.Join(filters, ", ")))
results = append(results, fmt.Sprintf("Count: %d", r.Count))
results = append(results, fmt.Sprintf("Offset: %d", r.Offset))
results = append(results, fmt.Sprintf("Sort Direction: %s", r.SortDirection))
results = append(results, fmt.Sprintf("Continuation Token: %s", r.ContinuationToken))
results = append(results, fmt.Sprintf("Start Time: %d", r.StartTime))
results = append(results, fmt.Sprintf("Stop Time: %d", r.StopTime))
return strings.Join(results, "; ")
}
// Serve handles Google Reader API calls.
func Serve(router *mux.Router, store *storage.Storage) {
handler := &handler{store, router}
router.HandleFunc("/accounts/ClientLogin", handler.clientLoginHandler).Methods(http.MethodPost).Name("ClientLogin")
middleware := newMiddleware(store)
sr := router.PathPrefix("/reader/api/0").Subrouter()
sr.Use(middleware.handleCORS)
sr.Use(middleware.apiKeyAuth)
sr.Methods(http.MethodOptions)
sr.HandleFunc("/token", handler.tokenHandler).Methods(http.MethodGet).Name("Token")
sr.HandleFunc("/edit-tag", handler.editTagHandler).Methods(http.MethodPost).Name("EditTag")
sr.HandleFunc("/rename-tag", handler.renameTagHandler).Methods(http.MethodPost).Name("Rename Tag")
sr.HandleFunc("/disable-tag", handler.disableTagHandler).Methods(http.MethodPost).Name("Disable Tag")
sr.HandleFunc("/tag/list", handler.tagListHandler).Methods(http.MethodGet).Name("TagList")
sr.HandleFunc("/user-info", handler.userInfoHandler).Methods(http.MethodGet).Name("UserInfo")
sr.HandleFunc("/subscription/list", handler.subscriptionListHandler).Methods(http.MethodGet).Name("SubscriptonList")
sr.HandleFunc("/subscription/edit", handler.editSubscriptionHandler).Methods(http.MethodPost).Name("SubscriptionEdit")
sr.HandleFunc("/subscription/quickadd", handler.quickAddHandler).Methods(http.MethodPost).Name("QuickAdd")
sr.HandleFunc("/stream/items/ids", handler.streamItemIDsHandler).Methods(http.MethodGet).Name("StreamItemIDs")
sr.HandleFunc("/stream/items/contents", handler.streamItemContentsHandler).Methods(http.MethodPost).Name("StreamItemsContents")
sr.PathPrefix("/").HandlerFunc(handler.serveHandler).Methods(http.MethodPost, http.MethodGet).Name("GoogleReaderApiEndpoint")
}
func getStreamFilterModifiers(r *http.Request) (RequestModifiers, error) {
userID := request.UserID(r)
result := RequestModifiers{
SortDirection: "desc",
UserID: userID,
}
streamOrder := request.QueryStringParam(r, ParamStreamOrder, "d")
if streamOrder == "o" {
result.SortDirection = "asc"
}
var err error
result.Streams, err = getStreams(request.QueryStringParamList(r, ParamStreamID), userID)
if err != nil {
return RequestModifiers{}, err
}
result.ExcludeTargets, err = getStreams(request.QueryStringParamList(r, ParamStreamExcludes), userID)
if err != nil {
return RequestModifiers{}, err
}
result.FilterTargets, err = getStreams(request.QueryStringParamList(r, ParamStreamFilters), userID)
if err != nil {
return RequestModifiers{}, err
}
result.Count = request.QueryIntParam(r, ParamStreamMaxItems, 0)
result.Offset = request.QueryIntParam(r, ParamContinuation, 0)
result.StartTime = request.QueryInt64Param(r, ParamStreamStartTime, int64(0))
result.StopTime = request.QueryInt64Param(r, ParamStreamStopTime, int64(0))
return result, nil
}
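// Illustrative query string and the modifiers it yields (the values are
// placeholders):
//
//	?s=user/-/state/com.google/reading-list&xt=user/-/state/com.google/read&n=100&r=o
//	-> Streams: [ReadingListStream], Exclusions: [ReadStream], Count: 100, SortDirection: "asc"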
func getStream(streamID string, userID int64) (Stream, error) {
switch {
case strings.HasPrefix(streamID, FeedPrefix):
return Stream{Type: FeedStream, ID: strings.TrimPrefix(streamID, FeedPrefix)}, nil
case strings.HasPrefix(streamID, fmt.Sprintf(UserStreamPrefix, userID)) || strings.HasPrefix(streamID, StreamPrefix):
id := strings.TrimPrefix(streamID, fmt.Sprintf(UserStreamPrefix, userID))
id = strings.TrimPrefix(id, StreamPrefix)
switch id {
case Read:
return Stream{ReadStream, ""}, nil
case Starred:
return Stream{StarredStream, ""}, nil
case ReadingList:
return Stream{ReadingListStream, ""}, nil
case KeptUnread:
return Stream{KeptUnreadStream, ""}, nil
case Broadcast:
return Stream{BroadcastStream, ""}, nil
case BroadcastFriends:
return Stream{BroadcastFriendsStream, ""}, nil
case Like:
return Stream{LikeStream, ""}, nil
default:
return Stream{NoStream, ""}, fmt.Errorf("googlereader: unknown stream with id: %s", id)
}
case strings.HasPrefix(streamID, fmt.Sprintf(UserLabelPrefix, userID)) || strings.HasPrefix(streamID, LabelPrefix):
id := strings.TrimPrefix(streamID, fmt.Sprintf(UserLabelPrefix, userID))
id = strings.TrimPrefix(id, LabelPrefix)
return Stream{LabelStream, id}, nil
case streamID == "":
return Stream{NoStream, ""}, nil
default:
return Stream{NoStream, ""}, fmt.Errorf("googlereader: unknown stream type: %s", streamID)
}
}
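// A few illustrative stream IDs and the streams they resolve to (assuming
// user ID 1):
//
//	"feed/42"                         -> Stream{FeedStream, "42"}
//	"user/-/state/com.google/starred" -> Stream{StarredStream, ""}
//	"user/1/label/News"               -> Stream{LabelStream, "News"}
//	""                                -> Stream{NoStream, ""}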
func getStreams(streamIDs []string, userID int64) ([]Stream, error) {
streams := make([]Stream, 0)
for _, streamID := range streamIDs {
stream, err := getStream(streamID, userID)
if err != nil {
return []Stream{}, err
}
streams = append(streams, stream)
}
return streams, nil
}
func checkAndSimplifyTags(addTags []Stream, removeTags []Stream) (map[StreamType]bool, error) {
tags := make(map[StreamType]bool)
for _, s := range addTags {
switch s.Type {
case ReadStream:
if _, ok := tags[KeptUnreadStream]; ok {
return nil, fmt.Errorf("googlereader: %s ad %s should not be supplied simultaneously", KeptUnread, Read)
}
tags[ReadStream] = true
case KeptUnreadStream:
if _, ok := tags[ReadStream]; ok {
return nil, fmt.Errorf("googlereader: %s ad %s should not be supplied simultaneously", KeptUnread, Read)
}
tags[ReadStream] = false
case StarredStream:
tags[StarredStream] = true
case BroadcastStream, LikeStream:
slog.Debug("Broadcast & Like tags are not implemented!")
default:
return nil, fmt.Errorf("googlereader: unsupported tag type: %s", s.Type)
}
}
for _, s := range removeTags {
switch s.Type {
case ReadStream:
if _, ok := tags[ReadStream]; ok {
return nil, fmt.Errorf("googlereader: %s ad %s should not be supplied simultaneously", KeptUnread, Read)
}
tags[ReadStream] = false
case KeptUnreadStream:
if _, ok := tags[ReadStream]; ok {
return nil, fmt.Errorf("googlereader: %s ad %s should not be supplied simultaneously", KeptUnread, Read)
}
tags[ReadStream] = true
case StarredStream:
if _, ok := tags[StarredStream]; ok {
return nil, fmt.Errorf("googlereader: %s should not be supplied for add and remove simultaneously", Starred)
}
tags[StarredStream] = false
case BroadcastStream, LikeStream:
slog.Debug("Broadcast & Like tags are not implemented!")
default:
return nil, fmt.Errorf("googlereader: unsupported tag type: %s", s.Type)
}
}
return tags, nil
}
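// For example (illustrative), adding the read stream while removing the
// starred stream simplifies to a pair of status flags:
//
//	checkAndSimplifyTags([]Stream{{ReadStream, ""}}, []Stream{{StarredStream, ""}})
//	-> map[StreamType]bool{ReadStream: true, StarredStream: false}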
func getItemIDs(r *http.Request) ([]int64, error) {
items := r.Form[ParamItemIDs]
if len(items) == 0 {
return nil, fmt.Errorf("googlereader: no items requested")
}
itemIDs := make([]int64, len(items))
for i, item := range items {
var itemID int64
_, err := fmt.Sscanf(item, EntryIDLong, &itemID)
if err != nil {
itemID, err = strconv.ParseInt(item, 16, 64)
if err != nil {
return nil, fmt.Errorf("googlereader: could not parse item: %v", item)
}
}
itemIDs[i] = itemID
}
return itemIDs, nil
}
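// Item IDs may arrive either in the long tag form or as a bare hexadecimal
// value; both of the following (illustrative) values parse to entry 379:
//
//	tag:google.com,2005:reader/item/000000000000017b
//	17b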
func checkOutputFormat(r *http.Request) error {
var output string
if r.Method == http.MethodPost {
err := r.ParseForm()
if err != nil {
return err
}
output = r.Form.Get("output")
} else {
output = request.QueryStringParam(r, "output", "")
}
if output != "json" {
err := fmt.Errorf("googlereader: only json output is supported")
return err
}
return nil
}
func (h *handler) clientLoginHandler(w http.ResponseWriter, r *http.Request) {
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /accounts/ClientLogin",
slog.String("handler", "clientLoginHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
if err := r.ParseForm(); err != nil {
slog.Warn("[GoogleReader] Could not parse request form data",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Any("error", err),
)
json.Unauthorized(w, r)
return
}
username := r.Form.Get("Email")
password := r.Form.Get("Passwd")
output := r.Form.Get("output")
if username == "" || password == "" {
slog.Warn("[GoogleReader] Empty username or password",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
json.Unauthorized(w, r)
return
}
if err := h.store.GoogleReaderUserCheckPassword(username, password); err != nil {
slog.Warn("[GoogleReader] Invalid username or password",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.String("username", username),
slog.Any("error", err),
)
json.Unauthorized(w, r)
return
}
slog.Info("[GoogleReader] User authenticated successfully",
slog.Bool("authentication_successful", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.String("username", username),
)
integration, err := h.store.GoogleReaderUserGetIntegration(username)
if err != nil {
json.ServerError(w, r, err)
return
}
h.store.SetLastLogin(integration.UserID)
token := getAuthToken(integration.GoogleReaderUsername, integration.GoogleReaderPassword)
slog.Debug("[GoogleReader] Created token",
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.String("username", username),
)
result := login{SID: token, LSID: token, Auth: token}
if output == "json" {
json.OK(w, r, result)
return
}
builder := response.New(w, r)
builder.WithHeader("Content-Type", "text/plain; charset=UTF-8")
builder.WithBody(result.String())
builder.Write()
}
func (h *handler) tokenHandler(w http.ResponseWriter, r *http.Request) {
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /token",
slog.String("handler", "tokenHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
if !request.IsAuthenticated(r) {
slog.Warn("[GoogleReader] User is not authenticated",
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
json.Unauthorized(w, r)
return
}
token := request.GoolgeReaderToken(r)
if token == "" {
slog.Warn("[GoogleReader] User does not have token",
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", request.UserID(r)),
)
json.Unauthorized(w, r)
return
}
slog.Debug("[GoogleReader] Token handler",
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", request.UserID(r)),
slog.String("token", token),
)
w.Header().Add("Content-Type", "text/plain; charset=UTF-8")
w.WriteHeader(http.StatusOK)
w.Write([]byte(token))
}
func (h *handler) editTagHandler(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /edit-tag",
slog.String("handler", "editTagHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", userID),
)
if err := r.ParseForm(); err != nil {
json.ServerError(w, r, err)
return
}
addTags, err := getStreams(r.PostForm[ParamTagsAdd], userID)
if err != nil {
json.ServerError(w, r, err)
return
}
removeTags, err := getStreams(r.PostForm[ParamTagsRemove], userID)
if err != nil {
json.ServerError(w, r, err)
return
}
if len(addTags) == 0 && len(removeTags) == 0 {
err = fmt.Errorf("googlreader: add or/and remove tags should be supplied")
json.ServerError(w, r, err)
return
}
tags, err := checkAndSimplifyTags(addTags, removeTags)
if err != nil {
json.ServerError(w, r, err)
return
}
itemIDs, err := getItemIDs(r)
if err != nil {
json.ServerError(w, r, err)
return
}
slog.Debug("[GoogleReader] Edited tags",
slog.String("handler", "editTagHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", userID),
slog.Any("item_ids", itemIDs),
slog.Any("tags", tags),
)
builder := h.store.NewEntryQueryBuilder(userID)
builder.WithEntryIDs(itemIDs)
builder.WithoutStatus(model.EntryStatusRemoved)
entries, err := builder.GetEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
n := 0
readEntryIDs := make([]int64, 0)
unreadEntryIDs := make([]int64, 0)
starredEntryIDs := make([]int64, 0)
unstarredEntryIDs := make([]int64, 0)
for _, entry := range entries {
if read, exists := tags[ReadStream]; exists {
if read && entry.Status == model.EntryStatusUnread {
readEntryIDs = append(readEntryIDs, entry.ID)
} else if !read && entry.Status == model.EntryStatusRead {
unreadEntryIDs = append(unreadEntryIDs, entry.ID)
}
}
if starred, exists := tags[StarredStream]; exists {
if starred && !entry.Starred {
starredEntryIDs = append(starredEntryIDs, entry.ID)
// filter the original array
entries[n] = entry
n++
} else if !starred && entry.Starred {
unstarredEntryIDs = append(unstarredEntryIDs, entry.ID)
}
}
}
entries = entries[:n]
if len(readEntryIDs) > 0 {
err = h.store.SetEntriesStatus(userID, readEntryIDs, model.EntryStatusRead)
if err != nil {
json.ServerError(w, r, err)
return
}
}
if len(unreadEntryIDs) > 0 {
err = h.store.SetEntriesStatus(userID, unreadEntryIDs, model.EntryStatusUnread)
if err != nil {
json.ServerError(w, r, err)
return
}
}
if len(unstarredEntryIDs) > 0 {
err = h.store.SetEntriesBookmarkedState(userID, unstarredEntryIDs, false)
if err != nil {
json.ServerError(w, r, err)
return
}
}
if len(starredEntryIDs) > 0 {
err = h.store.SetEntriesBookmarkedState(userID, starredEntryIDs, true)
if err != nil {
json.ServerError(w, r, err)
return
}
}
if len(entries) > 0 {
settings, err := h.store.Integration(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
for _, entry := range entries {
e := entry
go func() {
integration.SendEntry(e, settings)
}()
}
}
OK(w, r)
}
func (h *handler) quickAddHandler(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /subscription/quickadd",
slog.String("handler", "quickAddHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", userID),
)
err := r.ParseForm()
if err != nil {
json.BadRequest(w, r, err)
return
}
feedURL := r.Form.Get(ParamQuickAdd)
if !validator.IsValidURL(feedURL) {
json.BadRequest(w, r, fmt.Errorf("googlereader: invalid URL: %s", feedURL))
return
}
requestBuilder := fetcher.NewRequestBuilder()
requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
var rssBridgeURL string
if intg, err := h.store.Integration(userID); err == nil && intg != nil && intg.RSSBridgeEnabled {
rssBridgeURL = intg.RSSBridgeURL
}
subscriptions, localizedError := mfs.NewSubscriptionFinder(requestBuilder).FindSubscriptions(feedURL, rssBridgeURL)
if localizedError != nil {
json.ServerError(w, r, localizedError.Error())
return
}
if len(subscriptions) == 0 {
json.OK(w, r, quickAddResponse{
NumResults: 0,
})
return
}
toSubscribe := Stream{FeedStream, subscriptions[0].URL}
category := Stream{NoStream, ""}
newFeed, err := subscribe(toSubscribe, category, "", h.store, userID)
if err != nil {
json.ServerError(w, r, err)
return
}
slog.Debug("[GoogleReader] Added a new feed",
slog.String("handler", "quickAddHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", userID),
slog.String("feed_url", newFeed.FeedURL),
)
json.OK(w, r, quickAddResponse{
NumResults: 1,
Query: newFeed.FeedURL,
StreamID: fmt.Sprintf(FeedPrefix+"%d", newFeed.ID),
StreamName: newFeed.Title,
})
}
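// Illustrative quick-add request (the feed URL is a placeholder):
//
//	POST /reader/api/0/subscription/quickadd
//	quickadd=https%3A%2F%2Fexample.org%2Ffeed.xml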
func getFeed(stream Stream, store *storage.Storage, userID int64) (*model.Feed, error) {
feedID, err := strconv.ParseInt(stream.ID, 10, 64)
if err != nil {
return nil, err
}
return store.FeedByID(userID, feedID)
}
func getOrCreateCategory(category Stream, store *storage.Storage, userID int64) (*model.Category, error) {
switch {
case category.ID == "":
return store.FirstCategory(userID)
case store.CategoryTitleExists(userID, category.ID):
return store.CategoryByTitle(userID, category.ID)
default:
catRequest := model.CategoryRequest{
Title: category.ID,
}
return store.CreateCategory(userID, &catRequest)
}
}
func subscribe(newFeed Stream, category Stream, title string, store *storage.Storage, userID int64) (*model.Feed, error) {
destCategory, err := getOrCreateCategory(category, store, userID)
if err != nil {
return nil, err
}
feedRequest := model.FeedCreationRequest{
FeedURL: newFeed.ID,
CategoryID: destCategory.ID,
}
verr := validator.ValidateFeedCreation(store, userID, &feedRequest)
if verr != nil {
return nil, verr.Error()
}
created, localizedError := mff.CreateFeed(store, userID, &feedRequest)
if localizedError != nil {
return nil, localizedError.Error()
}
if title != "" {
feedModification := model.FeedModificationRequest{
Title: &title,
}
feedModification.Patch(created)
if err := store.UpdateFeed(created); err != nil {
return nil, err
}
}
return created, nil
}
func unsubscribe(streams []Stream, store *storage.Storage, userID int64) error {
for _, stream := range streams {
feedID, err := strconv.ParseInt(stream.ID, 10, 64)
if err != nil {
return err
}
err = store.RemoveFeed(userID, feedID)
if err != nil {
return err
}
}
return nil
}
func rename(stream Stream, title string, store *storage.Storage, userID int64) error {
if title == "" {
return errors.New("empty title")
}
feed, err := getFeed(stream, store, userID)
if err != nil {
return err
}
feedModification := model.FeedModificationRequest{
Title: &title,
}
feedModification.Patch(feed)
return store.UpdateFeed(feed)
}
func move(stream Stream, destination Stream, store *storage.Storage, userID int64) error {
feed, err := getFeed(stream, store, userID)
if err != nil {
return err
}
category, err := getOrCreateCategory(destination, store, userID)
if err != nil {
return err
}
feedModification := model.FeedModificationRequest{
CategoryID: &category.ID,
}
feedModification.Patch(feed)
return store.UpdateFeed(feed)
}
func (h *handler) editSubscriptionHandler(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /subscription/edit",
slog.String("handler", "editSubscriptionHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", userID),
)
if err := r.ParseForm(); err != nil {
json.BadRequest(w, r, err)
return
}
streamIds, err := getStreams(r.Form[ParamStreamID], userID)
if err != nil || len(streamIds) == 0 {
json.BadRequest(w, r, errors.New("googlereader: no valid stream IDs provided"))
return
}
newLabel, err := getStream(r.Form.Get(ParamTagsAdd), userID)
if err != nil {
json.BadRequest(w, r, fmt.Errorf("googlereader: invalid data in %s", ParamTagsAdd))
return
}
title := r.Form.Get(ParamTitle)
action := r.Form.Get(ParamSubscribeAction)
switch action {
case "subscribe":
_, err := subscribe(streamIds[0], newLabel, title, h.store, userID)
if err != nil {
json.ServerError(w, r, err)
return
}
case "unsubscribe":
err := unsubscribe(streamIds, h.store, userID)
if err != nil {
json.ServerError(w, r, err)
return
}
case "edit":
if title != "" {
if err := rename(streamIds[0], title, h.store, userID); err != nil {
json.ServerError(w, r, err)
return
}
}
if r.Form.Has(ParamTagsAdd) {
if newLabel.Type != LabelStream {
json.BadRequest(w, r, errors.New("destination must be a label"))
return
}
if err := move(streamIds[0], newLabel, h.store, userID); err != nil {
json.ServerError(w, r, err)
return
}
}
default:
json.ServerError(w, r, fmt.Errorf("googlereader: unrecognized action %s", action))
return
}
OK(w, r)
}
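// Illustrative subscription/edit calls (stream IDs, titles and labels are
// placeholders):
//
//	ac=subscribe&s=feed/https://example.org/feed.xml&t=Example&a=user/-/label/News
//	ac=unsubscribe&s=feed/42
//	ac=edit&s=feed/42&t=New Title&a=user/-/label/Tech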
func (h *handler) streamItemContentsHandler(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /stream/items/contents",
slog.String("handler", "streamItemContentsHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", userID),
)
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}
err := r.ParseForm()
if err != nil {
json.ServerError(w, r, err)
return
}
var user *model.User
if user, err = h.store.UserByID(userID); err != nil {
json.ServerError(w, r, err)
return
}
requestModifiers, err := getStreamFilterModifiers(r)
if err != nil {
json.ServerError(w, r, err)
return
}
userReadingList := fmt.Sprintf(UserStreamPrefix, userID) + ReadingList
userRead := fmt.Sprintf(UserStreamPrefix, userID) + Read
userStarred := fmt.Sprintf(UserStreamPrefix, userID) + Starred
itemIDs, err := getItemIDs(r)
if err != nil {
json.ServerError(w, r, err)
return
}
slog.Debug("[GoogleReader] Fetching item contents",
slog.String("handler", "streamItemContentsHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", userID),
slog.Any("item_ids", itemIDs),
)
builder := h.store.NewEntryQueryBuilder(userID)
builder.WithoutStatus(model.EntryStatusRemoved)
builder.WithEntryIDs(itemIDs)
builder.WithSorting(model.DefaultSortingOrder, requestModifiers.SortDirection)
entries, err := builder.GetEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
if len(entries) == 0 {
json.BadRequest(w, r, fmt.Errorf("googlereader: no items returned from the database for item IDs: %v", itemIDs))
return
}
result := streamContentItems{
Direction: "ltr",
ID: fmt.Sprintf("feed/%d", entries[0].FeedID),
Title: entries[0].Feed.Title,
Alternate: []contentHREFType{
{
HREF: entries[0].Feed.SiteURL,
Type: "text/html",
},
},
Updated: time.Now().Unix(),
Self: []contentHREF{
{
HREF: config.Opts.RootURL() + route.Path(h.router, "StreamItemsContents"),
},
},
Author: user.Username,
}
contentItems := make([]contentItem, len(entries))
for i, entry := range entries {
enclosures := make([]contentItemEnclosure, 0, len(entry.Enclosures))
for _, enclosure := range entry.Enclosures {
enclosures = append(enclosures, contentItemEnclosure{URL: enclosure.URL, Type: enclosure.MimeType})
}
categories := make([]string, 0)
categories = append(categories, userReadingList)
if entry.Feed.Category.Title != "" {
categories = append(categories, fmt.Sprintf(UserLabelPrefix, userID)+entry.Feed.Category.Title)
}
if entry.Status == model.EntryStatusRead {
categories = append(categories, userRead)
}
if entry.Starred {
categories = append(categories, userStarred)
}
entry.Content = mediaproxy.RewriteDocumentWithAbsoluteProxyURL(h.router, entry.Content)
entry.Enclosures.ProxifyEnclosureURL(h.router)
contentItems[i] = contentItem{
ID: fmt.Sprintf(EntryIDLong, entry.ID),
Title: entry.Title,
Author: entry.Author,
TimestampUsec: fmt.Sprintf("%d", entry.Date.UnixMicro()),
CrawlTimeMsec: fmt.Sprintf("%d", entry.CreatedAt.UnixMilli()),
Published: entry.Date.Unix(),
Updated: entry.ChangedAt.Unix(),
Categories: categories,
Canonical: []contentHREF{
{
HREF: entry.URL,
},
},
Alternate: []contentHREFType{
{
HREF: entry.URL,
Type: "text/html",
},
},
Content: contentItemContent{
Direction: "ltr",
Content: entry.Content,
},
Summary: contentItemContent{
Direction: "ltr",
Content: entry.Content,
},
Origin: contentItemOrigin{
StreamID: fmt.Sprintf("feed/%d", entry.FeedID),
Title: entry.Feed.Title,
HTMLUrl: entry.Feed.SiteURL,
},
Enclosure: enclosures,
}
}
result.Items = contentItems
json.OK(w, r, result)
}
func (h *handler) disableTagHandler(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /disable-tags",
slog.String("handler", "disableTagHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", userID),
)
err := r.ParseForm()
if err != nil {
json.BadRequest(w, r, err)
return
}
streams, err := getStreams(r.Form[ParamStreamID], userID)
if err != nil {
json.BadRequest(w, r, fmt.Errorf("googlereader: invalid data in %s", ParamStreamID))
return
}
titles := make([]string, len(streams))
for i, stream := range streams {
if stream.Type != LabelStream {
json.BadRequest(w, r, errors.New("googlereader: only labels are supported"))
return
}
titles[i] = stream.ID
}
err = h.store.RemoveAndReplaceCategoriesByName(userID, titles)
if err != nil {
json.ServerError(w, r, err)
return
}
OK(w, r)
}
func (h *handler) renameTagHandler(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /rename-tag",
slog.String("handler", "renameTagHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
err := r.ParseForm()
if err != nil {
json.BadRequest(w, r, err)
return
}
source, err := getStream(r.Form.Get(ParamStreamID), userID)
if err != nil {
json.BadRequest(w, r, fmt.Errorf("googlereader: invalid data in %s", ParamStreamID))
return
}
destination, err := getStream(r.Form.Get(ParamDestination), userID)
if err != nil {
json.BadRequest(w, r, fmt.Errorf("googlereader: invalid data in %s", ParamDestination))
return
}
if source.Type != LabelStream || destination.Type != LabelStream {
json.BadRequest(w, r, errors.New("googlereader: only labels supported"))
return
}
if destination.ID == "" {
json.BadRequest(w, r, errors.New("googlereader: empty destination name"))
return
}
category, err := h.store.CategoryByTitle(userID, source.ID)
if err != nil {
json.ServerError(w, r, err)
return
}
if category == nil {
json.NotFound(w, r)
return
}
categoryRequest := model.CategoryRequest{
Title: destination.ID,
}
verr := validator.ValidateCategoryModification(h.store, userID, category.ID, &categoryRequest)
if verr != nil {
json.BadRequest(w, r, verr.Error())
return
}
categoryRequest.Patch(category)
err = h.store.UpdateCategory(category)
if err != nil {
json.ServerError(w, r, err)
return
}
OK(w, r)
}
func (h *handler) tagListHandler(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /tags/list",
slog.String("handler", "tagListHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}
var result tagsResponse
categories, err := h.store.Categories(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
result.Tags = make([]subscriptionCategory, 0)
result.Tags = append(result.Tags, subscriptionCategory{
ID: fmt.Sprintf(UserStreamPrefix, userID) + Starred,
})
for _, category := range categories {
result.Tags = append(result.Tags, subscriptionCategory{
ID: fmt.Sprintf(UserLabelPrefix, userID) + category.Title,
Label: category.Title,
Type: "folder",
})
}
json.OK(w, r, result)
}
func (h *handler) subscriptionListHandler(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /subscription/list",
slog.String("handler", "subscriptionListHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}
var result subscriptionsResponse
feeds, err := h.store.Feeds(userID)
if err != nil {
json.ServerError(w, r, err)
return
}
result.Subscriptions = make([]subscription, 0)
for _, feed := range feeds {
result.Subscriptions = append(result.Subscriptions, subscription{
ID: fmt.Sprintf(FeedPrefix+"%d", feed.ID),
Title: feed.Title,
URL: feed.FeedURL,
Categories: []subscriptionCategory{{fmt.Sprintf(UserLabelPrefix, userID) + feed.Category.Title, feed.Category.Title, "folder"}},
HTMLURL: feed.SiteURL,
IconURL: "", // TODO: Icons are base64 encoded in the DB.
})
}
json.OK(w, r, result)
}
func (h *handler) serveHandler(w http.ResponseWriter, r *http.Request) {
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] API endpoint not implemented yet",
slog.Any("url", r.RequestURI),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
json.OK(w, r, []string{})
}
func (h *handler) userInfoHandler(w http.ResponseWriter, r *http.Request) {
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /user-info",
slog.String("handler", "userInfoHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}
user, err := h.store.UserByID(request.UserID(r))
if err != nil {
json.ServerError(w, r, err)
return
}
userInfo := userInfo{UserID: fmt.Sprint(user.ID), UserName: user.Username, UserProfileID: fmt.Sprint(user.ID), UserEmail: user.Username}
json.OK(w, r, userInfo)
}
func (h *handler) streamItemIDsHandler(w http.ResponseWriter, r *http.Request) {
userID := request.UserID(r)
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle /stream/items/ids",
slog.String("handler", "streamItemIDsHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", userID),
)
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}
rm, err := getStreamFilterModifiers(r)
if err != nil {
json.ServerError(w, r, err)
return
}
slog.Debug("[GoogleReader] Request Modifiers",
slog.String("handler", "streamItemIDsHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Any("modifiers", rm),
)
if len(rm.Streams) != 1 {
json.ServerError(w, r, fmt.Errorf("googlereader: only one stream type expected"))
return
}
switch rm.Streams[0].Type {
case ReadingListStream:
h.handleReadingListStreamHandler(w, r, rm)
case StarredStream:
h.handleStarredStreamHandler(w, r, rm)
case ReadStream:
h.handleReadStreamHandler(w, r, rm)
case FeedStream:
h.handleFeedStreamHandler(w, r, rm)
default:
slog.Warn("[GoogleReader] Unknown Stream",
slog.String("handler", "streamItemIDsHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Any("stream_type", rm.Streams[0].Type),
)
json.ServerError(w, r, fmt.Errorf("googlereader: unknown stream type %s", rm.Streams[0].Type))
}
}
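// handleReadingListStreamHandler returns item IDs for the reading-list stream. An
// exclude target of type ReadStream narrows the query to unread entries (this is how
// clients typically request "unread only", e.g. via the xt parameter); other exclude
// target types are logged and ignored.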
func (h *handler) handleReadingListStreamHandler(w http.ResponseWriter, r *http.Request, rm RequestModifiers) {
clientIP := request.ClientIP(r)
slog.Debug("[GoogleReader] Handle ReadingListStream",
slog.String("handler", "handleReadingListStreamHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
builder := h.store.NewEntryQueryBuilder(rm.UserID)
for _, s := range rm.ExcludeTargets {
switch s.Type {
case ReadStream:
builder.WithStatus(model.EntryStatusUnread)
default:
slog.Warn("[GoogleReader] Unknown ExcludeTargets filter type",
slog.String("handler", "handleReadingListStreamHandler"),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Any("filter_type", s.Type),
)
}
}
builder.WithoutStatus(model.EntryStatusRemoved)
builder.WithLimit(rm.Count)
builder.WithOffset(rm.Offset)
builder.WithSorting(model.DefaultSortingOrder, rm.SortDirection)
if rm.StartTime > 0 {
builder.AfterPublishedDate(time.Unix(rm.StartTime, 0))
}
if rm.StopTime > 0 {
builder.BeforePublishedDate(time.Unix(rm.StopTime, 0))
}
rawEntryIDs, err := builder.GetEntryIDs()
if err != nil {
json.ServerError(w, r, err)
return
}
var itemRefs = make([]itemRef, 0)
for _, entryID := range rawEntryIDs {
formattedID := strconv.FormatInt(entryID, 10)
itemRefs = append(itemRefs, itemRef{ID: formattedID})
}
totalEntries, err := builder.CountEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
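// A continuation of 0 means there is no further page; otherwise it is the offset at
// which the next page of item IDs starts.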
continuation := 0
if len(itemRefs)+rm.Offset < totalEntries {
continuation = len(itemRefs) + rm.Offset
}
json.OK(w, r, streamIDResponse{itemRefs, continuation})
}
func (h *handler) handleStarredStreamHandler(w http.ResponseWriter, r *http.Request, rm RequestModifiers) {
builder := h.store.NewEntryQueryBuilder(rm.UserID)
builder.WithoutStatus(model.EntryStatusRemoved)
builder.WithStarred(true)
builder.WithLimit(rm.Count)
builder.WithOffset(rm.Offset)
builder.WithSorting(model.DefaultSortingOrder, rm.SortDirection)
if rm.StartTime > 0 {
builder.AfterPublishedDate(time.Unix(rm.StartTime, 0))
}
if rm.StopTime > 0 {
builder.BeforePublishedDate(time.Unix(rm.StopTime, 0))
}
rawEntryIDs, err := builder.GetEntryIDs()
if err != nil {
json.ServerError(w, r, err)
return
}
var itemRefs = make([]itemRef, 0)
for _, entryID := range rawEntryIDs {
formattedID := strconv.FormatInt(entryID, 10)
itemRefs = append(itemRefs, itemRef{ID: formattedID})
}
totalEntries, err := builder.CountEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
continuation := 0
if len(itemRefs)+rm.Offset < totalEntries {
continuation = len(itemRefs) + rm.Offset
}
json.OK(w, r, streamIDResponse{itemRefs, continuation})
}
func (h *handler) handleReadStreamHandler(w http.ResponseWriter, r *http.Request, rm RequestModifiers) {
builder := h.store.NewEntryQueryBuilder(rm.UserID)
builder.WithoutStatus(model.EntryStatusRemoved)
builder.WithStatus(model.EntryStatusRead)
builder.WithLimit(rm.Count)
builder.WithOffset(rm.Offset)
builder.WithSorting(model.DefaultSortingOrder, rm.SortDirection)
if rm.StartTime > 0 {
builder.AfterPublishedDate(time.Unix(rm.StartTime, 0))
}
if rm.StopTime > 0 {
builder.BeforePublishedDate(time.Unix(rm.StopTime, 0))
}
rawEntryIDs, err := builder.GetEntryIDs()
if err != nil {
json.ServerError(w, r, err)
return
}
var itemRefs = make([]itemRef, 0)
for _, entryID := range rawEntryIDs {
formattedID := strconv.FormatInt(entryID, 10)
itemRefs = append(itemRefs, itemRef{ID: formattedID})
}
totalEntries, err := builder.CountEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
continuation := 0
if len(itemRefs)+rm.Offset < totalEntries {
continuation = len(itemRefs) + rm.Offset
}
json.OK(w, r, streamIDResponse{itemRefs, continuation})
}
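// handleFeedStreamHandler returns item IDs for a single feed stream. The stream ID is
// assumed to already hold the bare numeric feed ID (any "feed/" prefix is stripped when
// the stream is parsed elsewhere in this package).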
func (h *handler) handleFeedStreamHandler(w http.ResponseWriter, r *http.Request, rm RequestModifiers) {
feedID, err := strconv.ParseInt(rm.Streams[0].ID, 10, 64)
if err != nil {
json.ServerError(w, r, err)
return
}
builder := h.store.NewEntryQueryBuilder(rm.UserID)
builder.WithoutStatus(model.EntryStatusRemoved)
builder.WithFeedID(feedID)
builder.WithLimit(rm.Count)
builder.WithOffset(rm.Offset)
builder.WithSorting(model.DefaultSortingOrder, rm.SortDirection)
if rm.StartTime > 0 {
builder.AfterPublishedDate(time.Unix(rm.StartTime, 0))
}
if rm.StopTime > 0 {
builder.BeforePublishedDate(time.Unix(rm.StopTime, 0))
}
if len(rm.ExcludeTargets) > 0 {
for _, s := range rm.ExcludeTargets {
if s.Type == ReadStream {
builder.WithoutStatus(model.EntryStatusRead)
}
}
}
rawEntryIDs, err := builder.GetEntryIDs()
if err != nil {
json.ServerError(w, r, err)
return
}
var itemRefs = make([]itemRef, 0)
for _, entryID := range rawEntryIDs {
formattedID := strconv.FormatInt(entryID, 10)
itemRefs = append(itemRefs, itemRef{ID: formattedID})
}
totalEntries, err := builder.CountEntries()
if err != nil {
json.ServerError(w, r, err)
return
}
continuation := 0
if len(itemRefs)+rm.Offset < totalEntries {
continuation = len(itemRefs) + rm.Offset
}
json.OK(w, r, streamIDResponse{itemRefs, continuation})
}
v2-2.2.6/internal/googlereader/middleware.go 0000664 0000000 0000000 00000014107 14756465373 0021016 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package googlereader // import "miniflux.app/v2/internal/googlereader"
import (
"context"
"crypto/hmac"
"crypto/sha1"
"encoding/hex"
"log/slog"
"net/http"
"strings"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/storage"
)
type middleware struct {
store *storage.Storage
}
func newMiddleware(s *storage.Storage) *middleware {
return &middleware{s}
}
func (m *middleware) handleCORS(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Access-Control-Allow-Origin", "*")
w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
w.Header().Set("Access-Control-Allow-Headers", "Authorization")
if r.Method == http.MethodOptions {
w.WriteHeader(http.StatusOK)
return
}
next.ServeHTTP(w, r)
})
}
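// apiKeyAuth authenticates Google Reader API requests. POST requests carry the token in
// the "T" form field; other requests must send the classic GoogleLogin header, for
// example (illustrative values): "Authorization: GoogleLogin auth=jane/0123456789abcdef".
// The token itself has the structure "username/hash" (see getAuthToken).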
func (m *middleware) apiKeyAuth(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
clientIP := request.ClientIP(r)
var token string
if r.Method == http.MethodPost {
if err := r.ParseForm(); err != nil {
slog.Warn("[GoogleReader] Could not parse request form data",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Any("error", err),
)
Unauthorized(w, r)
return
}
token = r.Form.Get("T")
if token == "" {
slog.Warn("[GoogleReader] Post-Form T field is empty",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
Unauthorized(w, r)
return
}
} else {
authorization := r.Header.Get("Authorization")
if authorization == "" {
slog.Warn("[GoogleReader] No token provided",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
Unauthorized(w, r)
return
}
fields := strings.Fields(authorization)
if len(fields) != 2 {
slog.Warn("[GoogleReader] Authorization header does not have the expected GoogleLogin format auth=xxxxxx",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
Unauthorized(w, r)
return
}
if fields[0] != "GoogleLogin" {
slog.Warn("[GoogleReader] Authorization header does not begin with GoogleLogin",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
Unauthorized(w, r)
return
}
auths := strings.Split(fields[1], "=")
if len(auths) != 2 {
slog.Warn("[GoogleReader] Authorization header does not have the expected GoogleLogin format auth=xxxxxx",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
Unauthorized(w, r)
return
}
if auths[0] != "auth" {
slog.Warn("[GoogleReader] Authorization header does not have the expected GoogleLogin format auth=xxxxxx",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
Unauthorized(w, r)
return
}
token = auths[1]
}
parts := strings.Split(token, "/")
if len(parts) != 2 {
slog.Warn("[GoogleReader] Auth token does not have the expected structure username/hash",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.String("token", token),
)
Unauthorized(w, r)
return
}
var integration *model.Integration
var user *model.User
var err error
if integration, err = m.store.GoogleReaderUserGetIntegration(parts[0]); err != nil {
slog.Warn("[GoogleReader] No user found with the given Google Reader username",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Any("error", err),
)
Unauthorized(w, r)
return
}
expectedToken := getAuthToken(integration.GoogleReaderUsername, integration.GoogleReaderPassword)
if expectedToken != token {
slog.Warn("[GoogleReader] Token does not match",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
Unauthorized(w, r)
return
}
if user, err = m.store.UserByID(integration.UserID); err != nil {
slog.Error("[GoogleReader] Unable to fetch user from database",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Any("error", err),
)
Unauthorized(w, r)
return
}
if user == nil {
slog.Warn("[GoogleReader] No user found with the given Google Reader credentials",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
)
Unauthorized(w, r)
return
}
slog.Info("[GoogleReader] User authenticated successfully",
slog.Bool("authentication_successful", true),
slog.String("client_ip", clientIP),
slog.String("user_agent", r.UserAgent()),
slog.Int64("user_id", user.ID),
slog.String("username", user.Username),
)
m.store.SetLastLogin(integration.UserID)
ctx := r.Context()
ctx = context.WithValue(ctx, request.UserIDContextKey, user.ID)
ctx = context.WithValue(ctx, request.UserTimezoneContextKey, user.Timezone)
ctx = context.WithValue(ctx, request.IsAdminUserContextKey, user.IsAdmin)
ctx = context.WithValue(ctx, request.IsAuthenticatedContextKey, true)
ctx = context.WithValue(ctx, request.GoogleReaderToken, token)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
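// getAuthToken derives the per-user API token: an HMAC-SHA1 keyed with username+password
// (computed over an empty message), hex-encoded and prefixed with the username, yielding
// a value of the form "username/<40 hex characters>".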
func getAuthToken(username, password string) string {
token := hex.EncodeToString(hmac.New(sha1.New, []byte(username+password)).Sum(nil))
token = username + "/" + token
return token
}
v2-2.2.6/internal/googlereader/response.go 0000664 0000000 0000000 00000010076 14756465373 0020540 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package googlereader // import "miniflux.app/v2/internal/googlereader"
import (
"fmt"
"net/http"
"miniflux.app/v2/internal/http/response"
)
type login struct {
SID string `json:"SID,omitempty"`
LSID string `json:"LSID,omitempty"`
Auth string `json:"Auth,omitempty"`
}
func (l login) String() string {
return fmt.Sprintf("SID=%s\nLSID=%s\nAuth=%s\n", l.SID, l.LSID, l.Auth)
}
type userInfo struct {
UserID string `json:"userId"`
UserName string `json:"userName"`
UserProfileID string `json:"userProfileId"`
UserEmail string `json:"userEmail"`
}
type subscription struct {
ID string `json:"id"`
Title string `json:"title"`
Categories []subscriptionCategory `json:"categories"`
URL string `json:"url"`
HTMLURL string `json:"htmlUrl"`
IconURL string `json:"iconUrl"`
}
type quickAddResponse struct {
NumResults int64 `json:"numResults"`
Query string `json:"query,omitempty"`
StreamID string `json:"streamId,omitempty"`
StreamName string `json:"streamName,omitempty"`
}
type subscriptionCategory struct {
ID string `json:"id"`
Label string `json:"label,omitempty"`
Type string `json:"type,omitempty"`
}
type subscriptionsResponse struct {
Subscriptions []subscription `json:"subscriptions"`
}
type itemRef struct {
ID string `json:"id"`
DirectStreamIDs string `json:"directStreamIds,omitempty"`
TimestampUsec string `json:"timestampUsec,omitempty"`
}
type streamIDResponse struct {
ItemRefs []itemRef `json:"itemRefs"`
Continuation int `json:"continuation,omitempty,string"`
}
type tagsResponse struct {
Tags []subscriptionCategory `json:"tags"`
}
type streamContentItems struct {
Direction string `json:"direction"`
ID string `json:"id"`
Title string `json:"title"`
Self []contentHREF `json:"self"`
Alternate []contentHREFType `json:"alternate"`
Updated int64 `json:"updated"`
Items []contentItem `json:"items"`
Author string `json:"author"`
}
type contentItem struct {
ID string `json:"id"`
Categories []string `json:"categories"`
Title string `json:"title"`
CrawlTimeMsec string `json:"crawlTimeMsec"`
TimestampUsec string `json:"timestampUsec"`
Published int64 `json:"published"`
Updated int64 `json:"updated"`
Author string `json:"author"`
Alternate []contentHREFType `json:"alternate"`
Summary contentItemContent `json:"summary"`
Content contentItemContent `json:"content"`
Origin contentItemOrigin `json:"origin"`
Enclosure []contentItemEnclosure `json:"enclosure"`
Canonical []contentHREF `json:"canonical"`
}
type contentHREFType struct {
HREF string `json:"href"`
Type string `json:"type"`
}
type contentHREF struct {
HREF string `json:"href"`
}
type contentItemEnclosure struct {
URL string `json:"url"`
Type string `json:"type"`
}
type contentItemContent struct {
Direction string `json:"direction"`
Content string `json:"content"`
}
type contentItemOrigin struct {
StreamID string `json:"streamId"`
Title string `json:"title"`
HTMLUrl string `json:"htmlUrl"`
}
// Unauthorized sends a 401 Unauthorized response to the client.
func Unauthorized(w http.ResponseWriter, r *http.Request) {
builder := response.New(w, r)
builder.WithStatus(http.StatusUnauthorized)
builder.WithHeader("Content-Type", "text/plain")
builder.WithHeader("X-Reader-Google-Bad-Token", "true")
builder.WithBody("Unauthorized")
builder.Write()
}
// OK sends a plain-text OK response to the client.
func OK(w http.ResponseWriter, r *http.Request) {
builder := response.New(w, r)
builder.WithStatus(http.StatusOK)
builder.WithHeader("Content-Type", "text/plain")
builder.WithBody("OK")
builder.Write()
}
v2-2.2.6/internal/http/ 0000775 0000000 0000000 00000000000 14756465373 0014667 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/http/cookie/ 0000775 0000000 0000000 00000000000 14756465373 0016140 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/http/cookie/cookie.go 0000664 0000000 0000000 00000002217 14756465373 0017742 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package cookie // import "miniflux.app/v2/internal/http/cookie"
import (
"net/http"
"time"
"miniflux.app/v2/internal/config"
)
// Cookie names.
const (
CookieAppSessionID = "MinifluxAppSessionID"
CookieUserSessionID = "MinifluxUserSessionID"
)
// New creates a new cookie.
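// A minimal usage sketch from a caller's perspective (the session token variable is
// illustrative):
//
// http.SetCookie(w, cookie.New(cookie.CookieUserSessionID, sessionToken, true, ""))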
func New(name, value string, isHTTPS bool, path string) *http.Cookie {
return &http.Cookie{
Name: name,
Value: value,
Path: basePath(path),
Secure: isHTTPS,
HttpOnly: true,
Expires: time.Now().Add(time.Duration(config.Opts.CleanupRemoveSessionsDays()) * 24 * time.Hour),
SameSite: http.SameSiteLaxMode,
}
}
// Expired returns an expired cookie.
func Expired(name string, isHTTPS bool, path string) *http.Cookie {
return &http.Cookie{
Name: name,
Value: "",
Path: basePath(path),
Secure: isHTTPS,
HttpOnly: true,
MaxAge: -1,
Expires: time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC),
SameSite: http.SameSiteLaxMode,
}
}
func basePath(path string) string {
if path == "" {
return "/"
}
return path
}
v2-2.2.6/internal/http/request/ 0000775 0000000 0000000 00000000000 14756465373 0016357 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/http/request/client_ip.go 0000664 0000000 0000000 00000002221 14756465373 0020651 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package request // import "miniflux.app/v2/internal/http/request"
import (
"net"
"net/http"
"strings"
)
// FindClientIP returns the client's real IP address based on trusted reverse-proxy HTTP headers.
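// Headers are checked in order (X-Forwarded-For first, then X-Real-Ip) and the first
// valid address of a comma-separated list wins; for example, "X-Forwarded-For:
// 203.0.113.195, 70.41.3.18" resolves to "203.0.113.195".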
func FindClientIP(r *http.Request) string {
headers := []string{"X-Forwarded-For", "X-Real-Ip"}
for _, header := range headers {
value := r.Header.Get(header)
if value != "" {
addresses := strings.Split(value, ",")
address := strings.TrimSpace(addresses[0])
address = dropIPv6zone(address)
if net.ParseIP(address) != nil {
return address
}
}
}
// Fallback to TCP/IP source IP address.
return FindRemoteIP(r)
}
// FindRemoteIP returns remote client IP address without considering HTTP headers.
func FindRemoteIP(r *http.Request) string {
remoteIP, _, err := net.SplitHostPort(r.RemoteAddr)
if err != nil {
remoteIP = r.RemoteAddr
}
return dropIPv6zone(remoteIP)
}
func dropIPv6zone(address string) string {
i := strings.IndexByte(address, '%')
if i != -1 {
address = address[:i]
}
return address
}
v2-2.2.6/internal/http/request/client_ip_test.go 0000664 0000000 0000000 00000007444 14756465373 0021724 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package request // import "miniflux.app/v2/internal/http/request"
import (
"net/http"
"testing"
)
func TestFindClientIPWithoutHeaders(t *testing.T) {
r := &http.Request{RemoteAddr: "192.168.0.1:4242"}
if ip := FindClientIP(r); ip != "192.168.0.1" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
r = &http.Request{RemoteAddr: "192.168.0.1"}
if ip := FindClientIP(r); ip != "192.168.0.1" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
r = &http.Request{RemoteAddr: "fe80::14c2:f039:edc7:edc7"}
if ip := FindClientIP(r); ip != "fe80::14c2:f039:edc7:edc7" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
r = &http.Request{RemoteAddr: "fe80::14c2:f039:edc7:edc7%eth0"}
if ip := FindClientIP(r); ip != "fe80::14c2:f039:edc7:edc7" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
r = &http.Request{RemoteAddr: "[fe80::14c2:f039:edc7:edc7%eth0]:4242"}
if ip := FindClientIP(r); ip != "fe80::14c2:f039:edc7:edc7" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
}
func TestFindClientIPWithXFFHeader(t *testing.T) {
// Test with multiple IPv4 addresses.
headers := http.Header{}
headers.Set("X-Forwarded-For", "203.0.113.195, 70.41.3.18, 150.172.238.178")
r := &http.Request{RemoteAddr: "192.168.0.1:4242", Header: headers}
if ip := FindClientIP(r); ip != "203.0.113.195" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
// Test with single IPv6 address.
headers = http.Header{}
headers.Set("X-Forwarded-For", "2001:db8:85a3:8d3:1319:8a2e:370:7348")
r = &http.Request{RemoteAddr: "192.168.0.1:4242", Header: headers}
if ip := FindClientIP(r); ip != "2001:db8:85a3:8d3:1319:8a2e:370:7348" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
// Test with single IPv6 address with zone
headers = http.Header{}
headers.Set("X-Forwarded-For", "fe80::14c2:f039:edc7:edc7%eth0")
r = &http.Request{RemoteAddr: "192.168.0.1:4242", Header: headers}
if ip := FindClientIP(r); ip != "fe80::14c2:f039:edc7:edc7" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
// Test with single IPv4 address.
headers = http.Header{}
headers.Set("X-Forwarded-For", "70.41.3.18")
r = &http.Request{RemoteAddr: "192.168.0.1:4242", Header: headers}
if ip := FindClientIP(r); ip != "70.41.3.18" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
// Test with invalid IP address.
headers = http.Header{}
headers.Set("X-Forwarded-For", "fake IP")
r = &http.Request{RemoteAddr: "192.168.0.1:4242", Header: headers}
if ip := FindClientIP(r); ip != "192.168.0.1" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
}
func TestClientIPWithXRealIPHeader(t *testing.T) {
headers := http.Header{}
headers.Set("X-Real-Ip", "192.168.122.1")
r := &http.Request{RemoteAddr: "192.168.0.1:4242", Header: headers}
if ip := FindClientIP(r); ip != "192.168.122.1" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
}
func TestClientIPWithBothHeaders(t *testing.T) {
headers := http.Header{}
headers.Set("X-Forwarded-For", "203.0.113.195, 70.41.3.18, 150.172.238.178")
headers.Set("X-Real-Ip", "192.168.122.1")
r := &http.Request{RemoteAddr: "192.168.0.1:4242", Header: headers}
if ip := FindClientIP(r); ip != "203.0.113.195" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
}
func TestClientIPWithUnixSocketRemoteAddress(t *testing.T) {
r := &http.Request{RemoteAddr: "@"}
if ip := FindClientIP(r); ip != "@" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
}
func TestClientIPWithUnixSocketRemoteAddrAndBothHeaders(t *testing.T) {
headers := http.Header{}
headers.Set("X-Forwarded-For", "203.0.113.195, 70.41.3.18, 150.172.238.178")
headers.Set("X-Real-Ip", "192.168.122.1")
r := &http.Request{RemoteAddr: "@", Header: headers}
if ip := FindClientIP(r); ip != "203.0.113.195" {
t.Fatalf(`Unexpected result, got: %q`, ip)
}
}
v2-2.2.6/internal/http/request/context.go 0000664 0000000 0000000 00000010747 14756465373 0020403 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package request // import "miniflux.app/v2/internal/http/request"
import (
"net/http"
"strconv"
"miniflux.app/v2/internal/model"
)
// ContextKey represents a context key.
type ContextKey int
// List of context keys.
const (
UserIDContextKey ContextKey = iota
UserTimezoneContextKey
IsAdminUserContextKey
IsAuthenticatedContextKey
UserSessionTokenContextKey
UserLanguageContextKey
UserThemeContextKey
SessionIDContextKey
CSRFContextKey
OAuth2StateContextKey
OAuth2CodeVerifierContextKey
FlashMessageContextKey
FlashErrorMessageContextKey
PocketRequestTokenContextKey
LastForceRefreshContextKey
ClientIPContextKey
GoogleReaderToken
WebAuthnDataContextKey
)
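// WebAuthnSessionData returns the WebAuthn session data stored in the request context, or nil when absent.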
func WebAuthnSessionData(r *http.Request) *model.WebAuthnSession {
if v := r.Context().Value(WebAuthnDataContextKey); v != nil {
if value, valid := v.(model.WebAuthnSession); valid {
return &value
}
}
return nil
}
// GoolgeReaderToken returns the Google Reader token from the request context, if it exists.
func GoolgeReaderToken(r *http.Request) string {
return getContextStringValue(r, GoogleReaderToken)
}
// IsAdminUser returns true when the logged-in user is an administrator.
func IsAdminUser(r *http.Request) bool {
return getContextBoolValue(r, IsAdminUserContextKey)
}
// IsAuthenticated returns true when the user is authenticated.
func IsAuthenticated(r *http.Request) bool {
return getContextBoolValue(r, IsAuthenticatedContextKey)
}
// UserID returns the ID of the logged-in user.
func UserID(r *http.Request) int64 {
return getContextInt64Value(r, UserIDContextKey)
}
// UserTimezone returns the timezone used by the logged-in user.
func UserTimezone(r *http.Request) string {
value := getContextStringValue(r, UserTimezoneContextKey)
if value == "" {
value = "UTC"
}
return value
}
// UserLanguage returns the locale used by the currently logged-in user.
func UserLanguage(r *http.Request) string {
language := getContextStringValue(r, UserLanguageContextKey)
if language == "" {
language = "en_US"
}
return language
}
// UserTheme returns the theme used by the currently logged-in user.
func UserTheme(r *http.Request) string {
theme := getContextStringValue(r, UserThemeContextKey)
if theme == "" {
theme = "system_serif"
}
return theme
}
// CSRF returns the current CSRF token.
func CSRF(r *http.Request) string {
return getContextStringValue(r, CSRFContextKey)
}
// SessionID returns the current session ID.
func SessionID(r *http.Request) string {
return getContextStringValue(r, SessionIDContextKey)
}
// UserSessionToken returns the current user session token.
func UserSessionToken(r *http.Request) string {
return getContextStringValue(r, UserSessionTokenContextKey)
}
// OAuth2State returns the current OAuth2 state.
func OAuth2State(r *http.Request) string {
return getContextStringValue(r, OAuth2StateContextKey)
}
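// OAuth2CodeVerifier returns the current OAuth2 code verifier.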
func OAuth2CodeVerifier(r *http.Request) string {
return getContextStringValue(r, OAuth2CodeVerifierContextKey)
}
// FlashMessage returns the flash message, if any.
func FlashMessage(r *http.Request) string {
return getContextStringValue(r, FlashMessageContextKey)
}
// FlashErrorMessage returns the flash error message, if any.
func FlashErrorMessage(r *http.Request) string {
return getContextStringValue(r, FlashErrorMessageContextKey)
}
// PocketRequestToken returns the Pocket Request Token if any.
func PocketRequestToken(r *http.Request) string {
return getContextStringValue(r, PocketRequestTokenContextKey)
}
// LastForceRefresh returns the last force refresh timestamp.
func LastForceRefresh(r *http.Request) int64 {
jsonStringValue := getContextStringValue(r, LastForceRefreshContextKey)
timestamp, err := strconv.ParseInt(jsonStringValue, 10, 64)
if err != nil {
return 0
}
return timestamp
}
// ClientIP returns the client IP address stored in the context.
func ClientIP(r *http.Request) string {
return getContextStringValue(r, ClientIPContextKey)
}
func getContextStringValue(r *http.Request, key ContextKey) string {
if v := r.Context().Value(key); v != nil {
if value, valid := v.(string); valid {
return value
}
}
return ""
}
func getContextBoolValue(r *http.Request, key ContextKey) bool {
if v := r.Context().Value(key); v != nil {
if value, valid := v.(bool); valid {
return value
}
}
return false
}
func getContextInt64Value(r *http.Request, key ContextKey) int64 {
if v := r.Context().Value(key); v != nil {
if value, valid := v.(int64); valid {
return value
}
}
return 0
}
v2-2.2.6/internal/http/request/context_test.go 0000664 0000000 0000000 00000025037 14756465373 0021440 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package request // import "miniflux.app/v2/internal/http/request"
import (
"context"
"net/http"
"testing"
)
func TestContextStringValue(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
ctx := r.Context()
ctx = context.WithValue(ctx, ClientIPContextKey, "IP")
r = r.WithContext(ctx)
result := getContextStringValue(r, ClientIPContextKey)
expected := "IP"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestContextStringValueWithInvalidType(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
ctx := r.Context()
ctx = context.WithValue(ctx, ClientIPContextKey, 0)
r = r.WithContext(ctx)
result := getContextStringValue(r, ClientIPContextKey)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestContextStringValueWhenUnset(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := getContextStringValue(r, ClientIPContextKey)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestContextBoolValue(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
ctx := r.Context()
ctx = context.WithValue(ctx, IsAdminUserContextKey, true)
r = r.WithContext(ctx)
result := getContextBoolValue(r, IsAdminUserContextKey)
expected := true
if result != expected {
t.Errorf(`Unexpected context value, got %v instead of %v`, result, expected)
}
}
func TestContextBoolValueWithInvalidType(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
ctx := r.Context()
ctx = context.WithValue(ctx, IsAdminUserContextKey, "invalid")
r = r.WithContext(ctx)
result := getContextBoolValue(r, IsAdminUserContextKey)
expected := false
if result != expected {
t.Errorf(`Unexpected context value, got %v instead of %v`, result, expected)
}
}
func TestContextBoolValueWhenUnset(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := getContextBoolValue(r, IsAdminUserContextKey)
expected := false
if result != expected {
t.Errorf(`Unexpected context value, got %v instead of %v`, result, expected)
}
}
func TestContextInt64Value(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
ctx := r.Context()
ctx = context.WithValue(ctx, UserIDContextKey, int64(1234))
r = r.WithContext(ctx)
result := getContextInt64Value(r, UserIDContextKey)
expected := int64(1234)
if result != expected {
t.Errorf(`Unexpected context value, got %d instead of %d`, result, expected)
}
}
func TestContextInt64ValueWithInvalidType(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
ctx := r.Context()
ctx = context.WithValue(ctx, UserIDContextKey, "invalid")
r = r.WithContext(ctx)
result := getContextInt64Value(r, UserIDContextKey)
expected := int64(0)
if result != expected {
t.Errorf(`Unexpected context value, got %d instead of %d`, result, expected)
}
}
func TestContextInt64ValueWhenUnset(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := getContextInt64Value(r, UserIDContextKey)
expected := int64(0)
if result != expected {
t.Errorf(`Unexpected context value, got %d instead of %d`, result, expected)
}
}
func TestIsAdmin(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := IsAdminUser(r)
expected := false
if result != expected {
t.Errorf(`Unexpected context value, got %v instead of %v`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, IsAdminUserContextKey, true)
r = r.WithContext(ctx)
result = IsAdminUser(r)
expected = true
if result != expected {
t.Errorf(`Unexpected context value, got %v instead of %v`, result, expected)
}
}
func TestIsAuthenticated(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := IsAuthenticated(r)
expected := false
if result != expected {
t.Errorf(`Unexpected context value, got %v instead of %v`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, IsAuthenticatedContextKey, true)
r = r.WithContext(ctx)
result = IsAuthenticated(r)
expected = true
if result != expected {
t.Errorf(`Unexpected context value, got %v instead of %v`, result, expected)
}
}
func TestUserID(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := UserID(r)
expected := int64(0)
if result != expected {
t.Errorf(`Unexpected context value, got %v instead of %v`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, UserIDContextKey, int64(123))
r = r.WithContext(ctx)
result = UserID(r)
expected = int64(123)
if result != expected {
t.Errorf(`Unexpected context value, got %v instead of %v`, result, expected)
}
}
func TestUserTimezone(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := UserTimezone(r)
expected := "UTC"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, UserTimezoneContextKey, "Europe/Paris")
r = r.WithContext(ctx)
result = UserTimezone(r)
expected = "Europe/Paris"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestUserLanguage(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := UserLanguage(r)
expected := "en_US"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, UserLanguageContextKey, "fr_FR")
r = r.WithContext(ctx)
result = UserLanguage(r)
expected = "fr_FR"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestUserTheme(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := UserTheme(r)
expected := "system_serif"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, UserThemeContextKey, "dark_serif")
r = r.WithContext(ctx)
result = UserTheme(r)
expected = "dark_serif"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestCSRF(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := CSRF(r)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, CSRFContextKey, "secret")
r = r.WithContext(ctx)
result = CSRF(r)
expected = "secret"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestSessionID(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := SessionID(r)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, SessionIDContextKey, "id")
r = r.WithContext(ctx)
result = SessionID(r)
expected = "id"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestUserSessionToken(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := UserSessionToken(r)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, UserSessionTokenContextKey, "token")
r = r.WithContext(ctx)
result = UserSessionToken(r)
expected = "token"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestOAuth2State(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := OAuth2State(r)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, OAuth2StateContextKey, "state")
r = r.WithContext(ctx)
result = OAuth2State(r)
expected = "state"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestFlashMessage(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := FlashMessage(r)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, FlashMessageContextKey, "message")
r = r.WithContext(ctx)
result = FlashMessage(r)
expected = "message"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestFlashErrorMessage(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := FlashErrorMessage(r)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, FlashErrorMessageContextKey, "error message")
r = r.WithContext(ctx)
result = FlashErrorMessage(r)
expected = "error message"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestPocketRequestToken(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := PocketRequestToken(r)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, PocketRequestTokenContextKey, "request token")
r = r.WithContext(ctx)
result = PocketRequestToken(r)
expected = "request token"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
func TestClientIP(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := ClientIP(r)
expected := ""
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
ctx := r.Context()
ctx = context.WithValue(ctx, ClientIPContextKey, "127.0.0.1")
r = r.WithContext(ctx)
result = ClientIP(r)
expected = "127.0.0.1"
if result != expected {
t.Errorf(`Unexpected context value, got %q instead of %q`, result, expected)
}
}
v2-2.2.6/internal/http/request/cookie.go 0000664 0000000 0000000 00000000606 14756465373 0020161 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package request // import "miniflux.app/v2/internal/http/request"
import "net/http"
// CookieValue returns the cookie value.
func CookieValue(r *http.Request, name string) string {
cookie, err := r.Cookie(name)
if err != nil {
return ""
}
return cookie.Value
}
v2-2.2.6/internal/http/request/cookie_test.go 0000664 0000000 0000000 00000001511 14756465373 0021214 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package request // import "miniflux.app/v2/internal/http/request"
import (
"net/http"
"testing"
)
func TestGetCookieValue(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
r.AddCookie(&http.Cookie{Value: "cookie_value", Name: "my_cookie"})
result := CookieValue(r, "my_cookie")
expected := "cookie_value"
if result != expected {
t.Errorf(`Unexpected cookie value, got %q instead of %q`, result, expected)
}
}
func TestGetCookieValueWhenUnset(t *testing.T) {
r, _ := http.NewRequest("GET", "http://example.org", nil)
result := CookieValue(r, "my_cookie")
expected := ""
if result != expected {
t.Errorf(`Unexpected cookie value, got %q instead of %q`, result, expected)
}
}
v2-2.2.6/internal/http/request/params.go 0000664 0000000 0000000 00000005360 14756465373 0020175 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package request // import "miniflux.app/v2/internal/http/request"
import (
"net/http"
"strconv"
"strings"
"github.com/gorilla/mux"
)
// FormInt64Value returns a form value as integer.
func FormInt64Value(r *http.Request, param string) int64 {
value := r.FormValue(param)
integer, err := strconv.ParseInt(value, 10, 64)
if err != nil {
return 0
}
return integer
}
// RouteInt64Param returns a URL route parameter as int64; invalid or negative values yield 0.
func RouteInt64Param(r *http.Request, param string) int64 {
vars := mux.Vars(r)
value, err := strconv.ParseInt(vars[param], 10, 64)
if err != nil {
return 0
}
if value < 0 {
return 0
}
return value
}
// RouteStringParam returns a URL route parameter as string.
func RouteStringParam(r *http.Request, param string) string {
vars := mux.Vars(r)
return vars[param]
}
// QueryStringParam returns a query string parameter as string.
func QueryStringParam(r *http.Request, param, defaultValue string) string {
value := r.URL.Query().Get(param)
if value == "" {
value = defaultValue
}
return value
}
// QueryStringParamList returns all values associated to the parameter.
func QueryStringParamList(r *http.Request, param string) []string {
var results []string
values := r.URL.Query()
if _, found := values[param]; found {
for _, value := range values[param] {
value = strings.TrimSpace(value)
if value != "" {
results = append(results, value)
}
}
}
return results
}
// QueryIntParam returns a query string parameter as integer.
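// For example, given "?limit=42", QueryIntParam(r, "limit", 100) returns 42; missing,
// negative, or non-numeric values fall back to the default.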
func QueryIntParam(r *http.Request, param string, defaultValue int) int {
value := r.URL.Query().Get(param)
if value == "" {
return defaultValue
}
val, err := strconv.ParseInt(value, 10, 0)
if err != nil {
return defaultValue
}
if val < 0 {
return defaultValue
}
return int(val)
}
// QueryInt64Param returns a query string parameter as int64.
func QueryInt64Param(r *http.Request, param string, defaultValue int64) int64 {
value := r.URL.Query().Get(param)
if value == "" {
return defaultValue
}
val, err := strconv.ParseInt(value, 10, 64)
if err != nil {
return defaultValue
}
if val < 0 {
return defaultValue
}
return val
}
// QueryBoolParam returns a query string parameter as bool.
func QueryBoolParam(r *http.Request, param string, defaultValue bool) bool {
value := r.URL.Query().Get(param)
if value == "" {
return defaultValue
}
val, err := strconv.ParseBool(value)
if err != nil {
return defaultValue
}
return val
}
// HasQueryParam checks if the query string contains the given parameter.
func HasQueryParam(r *http.Request, param string) bool {
values := r.URL.Query()
_, ok := values[param]
return ok
}
v2-2.2.6/internal/http/request/params_test.go 0000664 0000000 0000000 00000012064 14756465373 0021233 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package request // import "miniflux.app/v2/internal/http/request"
import (
"net/http"
"net/http/httptest"
"net/url"
"testing"
"github.com/gorilla/mux"
)
func TestFormInt64Value(t *testing.T) {
f := url.Values{}
f.Set("integer value", "42")
f.Set("invalid value", "invalid integer")
r := &http.Request{Form: f}
result := FormInt64Value(r, "integer value")
expected := int64(42)
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = FormInt64Value(r, "invalid value")
expected = int64(0)
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = FormInt64Value(r, "missing value")
expected = int64(0)
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
}
func TestRouteStringParam(t *testing.T) {
router := mux.NewRouter()
router.HandleFunc("/route/{variable}/index", func(w http.ResponseWriter, r *http.Request) {
result := RouteStringParam(r, "variable")
expected := "value"
if result != expected {
t.Errorf(`Unexpected result, got %q instead of %q`, result, expected)
}
result = RouteStringParam(r, "missing variable")
expected = ""
if result != expected {
t.Errorf(`Unexpected result, got %q instead of %q`, result, expected)
}
})
r, err := http.NewRequest("GET", "/route/value/index", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
router.ServeHTTP(w, r)
}
func TestRouteInt64Param(t *testing.T) {
router := mux.NewRouter()
router.HandleFunc("/a/{variable1}/b/{variable2}/c/{variable3}", func(w http.ResponseWriter, r *http.Request) {
result := RouteInt64Param(r, "variable1")
expected := int64(42)
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = RouteInt64Param(r, "missing variable")
expected = 0
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = RouteInt64Param(r, "variable2")
expected = 0
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = RouteInt64Param(r, "variable3")
expected = 0
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
})
r, err := http.NewRequest("GET", "/a/42/b/not-int/c/-10", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
router.ServeHTTP(w, r)
}
func TestQueryStringParam(t *testing.T) {
u, _ := url.Parse("http://example.org/?key=value")
r := &http.Request{URL: u}
result := QueryStringParam(r, "key", "fallback")
expected := "value"
if result != expected {
t.Errorf(`Unexpected result, got %q instead of %q`, result, expected)
}
result = QueryStringParam(r, "missing key", "fallback")
expected = "fallback"
if result != expected {
t.Errorf(`Unexpected result, got %q instead of %q`, result, expected)
}
}
func TestQueryIntParam(t *testing.T) {
u, _ := url.Parse("http://example.org/?key=42&invalid=value&negative=-5")
r := &http.Request{URL: u}
result := QueryIntParam(r, "key", 84)
expected := 42
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = QueryIntParam(r, "missing key", 84)
expected = 84
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = QueryIntParam(r, "negative", 69)
expected = 69
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = QueryIntParam(r, "invalid", 99)
expected = 99
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
}
func TestQueryInt64Param(t *testing.T) {
u, _ := url.Parse("http://example.org/?key=42&invalid=value&negative=-5")
r := &http.Request{URL: u}
result := QueryInt64Param(r, "key", int64(84))
expected := int64(42)
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = QueryInt64Param(r, "missing key", int64(84))
expected = int64(84)
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = QueryInt64Param(r, "invalid", int64(69))
expected = int64(69)
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
result = QueryInt64Param(r, "invalid", int64(99))
expected = int64(99)
if result != expected {
t.Errorf(`Unexpected result, got %d instead of %d`, result, expected)
}
}
func TestHasQueryParam(t *testing.T) {
u, _ := url.Parse("http://example.org/?key=42")
r := &http.Request{URL: u}
result := HasQueryParam(r, "key")
expected := true
if result != expected {
t.Errorf(`Unexpected result, got %v instead of %v`, result, expected)
}
result = HasQueryParam(r, "missing key")
expected = false
if result != expected {
t.Errorf(`Unexpected result, got %v instead of %v`, result, expected)
}
}
v2-2.2.6/internal/http/response/ 0000775 0000000 0000000 00000000000 14756465373 0016525 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/http/response/builder.go 0000664 0000000 0000000 00000007212 14756465373 0020504 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package response // import "miniflux.app/v2/internal/http/response"
import (
"compress/flate"
"compress/gzip"
"fmt"
"io"
"log/slog"
"net/http"
"strings"
"time"
"github.com/andybalholm/brotli"
)
const compressionThreshold = 1024
// Builder generates HTTP responses.
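// A typical call chain looks like the following (illustrative):
//
// response.New(w, r).WithStatus(http.StatusOK).WithHeader("Content-Type", "text/plain").WithBody("OK").Write()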
type Builder struct {
w http.ResponseWriter
r *http.Request
statusCode int
headers map[string]string
enableCompression bool
body any
}
// WithStatus uses the given status code to build the response.
func (b *Builder) WithStatus(statusCode int) *Builder {
b.statusCode = statusCode
return b
}
// WithHeader adds the given HTTP header to the response.
func (b *Builder) WithHeader(key, value string) *Builder {
b.headers[key] = value
return b
}
// WithBody uses the given body to build the response.
func (b *Builder) WithBody(body any) *Builder {
b.body = body
return b
}
// WithAttachment forces the document to be downloaded by the web browser.
func (b *Builder) WithAttachment(filename string) *Builder {
b.headers["Content-Disposition"] = fmt.Sprintf("attachment; filename=%s", filename)
return b
}
// WithoutCompression disables HTTP compression.
func (b *Builder) WithoutCompression() *Builder {
b.enableCompression = false
return b
}
// WithCaching adds caching headers to the response.
func (b *Builder) WithCaching(etag string, duration time.Duration, callback func(*Builder)) {
b.headers["ETag"] = etag
b.headers["Cache-Control"] = "public"
b.headers["Expires"] = time.Now().Add(duration).UTC().Format(http.TimeFormat)
if etag == b.r.Header.Get("If-None-Match") {
b.statusCode = http.StatusNotModified
b.body = nil
b.Write()
} else {
callback(b)
}
}
// Write generates the HTTP response.
func (b *Builder) Write() {
if b.body == nil {
b.writeHeaders()
return
}
switch v := b.body.(type) {
case []byte:
b.compress(v)
case string:
b.compress([]byte(v))
case error:
b.compress([]byte(v.Error()))
case io.Reader:
// Compression not implemented in this case
b.writeHeaders()
_, err := io.Copy(b.w, v)
if err != nil {
slog.Error("Unable to write response body", slog.Any("error", err))
}
}
}
func (b *Builder) writeHeaders() {
b.headers["X-Content-Type-Options"] = "nosniff"
b.headers["X-Frame-Options"] = "DENY"
b.headers["Referrer-Policy"] = "no-referrer"
for key, value := range b.headers {
b.w.Header().Set(key, value)
}
b.w.WriteHeader(b.statusCode)
}
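// compress writes the payload, negotiating Brotli, gzip, or deflate from the
// Accept-Encoding header when compression is enabled and the payload exceeds
// compressionThreshold; otherwise the data is written as-is.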
func (b *Builder) compress(data []byte) {
if b.enableCompression && len(data) > compressionThreshold {
acceptEncoding := b.r.Header.Get("Accept-Encoding")
switch {
case strings.Contains(acceptEncoding, "br"):
b.headers["Content-Encoding"] = "br"
b.writeHeaders()
brotliWriter := brotli.NewWriterV2(b.w, brotli.DefaultCompression)
defer brotliWriter.Close()
brotliWriter.Write(data)
return
case strings.Contains(acceptEncoding, "gzip"):
b.headers["Content-Encoding"] = "gzip"
b.writeHeaders()
gzipWriter := gzip.NewWriter(b.w)
defer gzipWriter.Close()
gzipWriter.Write(data)
return
case strings.Contains(acceptEncoding, "deflate"):
b.headers["Content-Encoding"] = "deflate"
b.writeHeaders()
flateWriter, _ := flate.NewWriter(b.w, -1)
defer flateWriter.Close()
flateWriter.Write(data)
return
}
}
b.writeHeaders()
b.w.Write(data)
}
// New creates a new response builder.
func New(w http.ResponseWriter, r *http.Request) *Builder {
return &Builder{w: w, r: r, statusCode: http.StatusOK, headers: make(map[string]string), enableCompression: true}
}
v2-2.2.6/internal/http/response/builder_test.go 0000664 0000000 0000000 00000022121 14756465373 0021537 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package response // import "miniflux.app/v2/internal/http/response"
import (
"errors"
"net/http"
"net/http/httptest"
"strings"
"testing"
"time"
)
func TestResponseHasCommonHeaders(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
headers := map[string]string{
"X-Content-Type-Options": "nosniff",
"X-Frame-Options": "DENY",
}
for header, expected := range headers {
actual := resp.Header.Get(header)
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
}
func TestBuildResponseWithCustomStatusCode(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithStatus(http.StatusNotAcceptable).Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusNotAcceptable
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
}
func TestBuildResponseWithCustomHeader(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithHeader("X-My-Header", "Value").Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expected := "Value"
actual := resp.Header.Get("X-My-Header")
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
func TestBuildResponseWithAttachment(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithAttachment("my_file.pdf").Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expected := "attachment; filename=my_file.pdf"
actual := resp.Header.Get("Content-Disposition")
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
func TestBuildResponseWithError(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithBody(errors.New("Some error")).Write()
})
handler.ServeHTTP(w, r)
expectedBody := `Some error`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
}
func TestBuildResponseWithByteBody(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithBody([]byte("body")).Write()
})
handler.ServeHTTP(w, r)
expectedBody := `body`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
}
func TestBuildResponseWithCachingEnabled(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithCaching("etag", 1*time.Minute, func(b *Builder) {
b.WithBody("cached body")
b.Write()
})
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusOK
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `cached body`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedHeader := "public"
actualHeader := resp.Header.Get("Cache-Control")
if actualHeader != expectedHeader {
t.Fatalf(`Unexpected cache control header, got %q instead of %q`, actualHeader, expectedHeader)
}
if resp.Header.Get("Expires") == "" {
t.Fatalf(`Expires header should not be empty`)
}
}
func TestBuildResponseWithCachingAndEtag(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
r.Header.Set("If-None-Match", "etag")
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithCaching("etag", 1*time.Minute, func(b *Builder) {
b.WithBody("cached body")
b.Write()
})
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusNotModified
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := ``
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedHeader := "public"
actualHeader := resp.Header.Get("Cache-Control")
if actualHeader != expectedHeader {
t.Fatalf(`Unexpected cache control header, got %q instead of %q`, actualHeader, expectedHeader)
}
if resp.Header.Get("Expires") == "" {
t.Fatalf(`Expires header should not be empty`)
}
}
func TestBuildResponseWithBrotliCompression(t *testing.T) {
body := strings.Repeat("a", compressionThreshold+1)
r, err := http.NewRequest("GET", "/", nil)
r.Header.Set("Accept-Encoding", "gzip, deflate, br")
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithBody(body).Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expected := "br"
actual := resp.Header.Get("Content-Encoding")
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
func TestBuildResponseWithGzipCompression(t *testing.T) {
body := strings.Repeat("a", compressionThreshold+1)
	r, err := http.NewRequest("GET", "/", nil)
	if err != nil {
		t.Fatal(err)
	}
	r.Header.Set("Accept-Encoding", "gzip, deflate")
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithBody(body).Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expected := "gzip"
actual := resp.Header.Get("Content-Encoding")
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
func TestBuildResponseWithDeflateCompression(t *testing.T) {
body := strings.Repeat("a", compressionThreshold+1)
	r, err := http.NewRequest("GET", "/", nil)
	if err != nil {
		t.Fatal(err)
	}
	r.Header.Set("Accept-Encoding", "deflate")
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithBody(body).Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expected := "deflate"
actual := resp.Header.Get("Content-Encoding")
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
func TestBuildResponseWithCompressionDisabled(t *testing.T) {
body := strings.Repeat("a", compressionThreshold+1)
	r, err := http.NewRequest("GET", "/", nil)
	if err != nil {
		t.Fatal(err)
	}
	r.Header.Set("Accept-Encoding", "deflate")
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithBody(body).WithoutCompression().Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expected := ""
actual := resp.Header.Get("Content-Encoding")
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
func TestBuildResponseWithDeflateCompressionAndSmallPayload(t *testing.T) {
body := strings.Repeat("a", compressionThreshold)
	r, err := http.NewRequest("GET", "/", nil)
	if err != nil {
		t.Fatal(err)
	}
	r.Header.Set("Accept-Encoding", "deflate")
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithBody(body).Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expected := ""
actual := resp.Header.Get("Content-Encoding")
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
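// Note: as the test above shows, compression only kicks in for payloads strictly
// larger than compressionThreshold; a payload of exactly compressionThreshold
// bytes is served without a Content-Encoding header.
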
func TestBuildResponseWithoutCompressionHeader(t *testing.T) {
body := strings.Repeat("a", compressionThreshold+1)
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithBody(body).Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expected := ""
actual := resp.Header.Get("Content-Encoding")
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
v2-2.2.6/internal/http/response/html/ 0000775 0000000 0000000 00000000000 14756465373 0017471 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/http/response/html/html.go 0000664 0000000 0000000 00000011347 14756465373 0020772 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package html // import "miniflux.app/v2/internal/http/response/html"
import (
"log/slog"
"net/http"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response"
)
// OK creates a new HTML response with a 200 status code.
func OK(w http.ResponseWriter, r *http.Request, body interface{}) {
builder := response.New(w, r)
builder.WithHeader("Content-Type", "text/html; charset=utf-8")
builder.WithHeader("Cache-Control", "no-cache, max-age=0, must-revalidate, no-store")
builder.WithBody(body)
builder.Write()
}
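// Usage sketch from a caller's perspective (illustrative; the handler name and
// body are placeholders):
//
//	func showPage(w http.ResponseWriter, r *http.Request) {
//		html.OK(w, r, "<p>rendered page</p>")
//	}
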
// ServerError sends an internal error to the client.
func ServerError(w http.ResponseWriter, r *http.Request, err error) {
slog.Error(http.StatusText(http.StatusInternalServerError),
slog.Any("error", err),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusInternalServerError),
),
)
builder := response.New(w, r)
builder.WithStatus(http.StatusInternalServerError)
builder.WithHeader("Content-Security-Policy", `default-src 'self'`)
builder.WithHeader("Content-Type", "text/html; charset=utf-8")
builder.WithHeader("Cache-Control", "no-cache, max-age=0, must-revalidate, no-store")
builder.WithBody(err)
builder.Write()
}
// BadRequest sends a bad request error to the client.
func BadRequest(w http.ResponseWriter, r *http.Request, err error) {
slog.Warn(http.StatusText(http.StatusBadRequest),
slog.Any("error", err),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusBadRequest),
),
)
builder := response.New(w, r)
builder.WithStatus(http.StatusBadRequest)
builder.WithHeader("Content-Security-Policy", `default-src 'self'`)
builder.WithHeader("Content-Type", "text/html; charset=utf-8")
builder.WithHeader("Cache-Control", "no-cache, max-age=0, must-revalidate, no-store")
builder.WithBody(err)
builder.Write()
}
// Forbidden sends a forbidden error to the client.
func Forbidden(w http.ResponseWriter, r *http.Request) {
slog.Warn(http.StatusText(http.StatusForbidden),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusForbidden),
),
)
builder := response.New(w, r)
builder.WithStatus(http.StatusForbidden)
builder.WithHeader("Content-Type", "text/html; charset=utf-8")
builder.WithHeader("Cache-Control", "no-cache, max-age=0, must-revalidate, no-store")
builder.WithBody("Access Forbidden")
builder.Write()
}
// NotFound sends a page not found error to the client.
func NotFound(w http.ResponseWriter, r *http.Request) {
slog.Warn(http.StatusText(http.StatusNotFound),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusNotFound),
),
)
builder := response.New(w, r)
builder.WithStatus(http.StatusNotFound)
builder.WithHeader("Content-Type", "text/html; charset=utf-8")
builder.WithHeader("Cache-Control", "no-cache, max-age=0, must-revalidate, no-store")
builder.WithBody("Page Not Found")
builder.Write()
}
// Redirect redirects the user to another location.
func Redirect(w http.ResponseWriter, r *http.Request, uri string) {
http.Redirect(w, r, uri, http.StatusFound)
}
// RequestedRangeNotSatisfiable sends a range not satisfiable error to the client.
func RequestedRangeNotSatisfiable(w http.ResponseWriter, r *http.Request, contentRange string) {
slog.Warn(http.StatusText(http.StatusRequestedRangeNotSatisfiable),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusRequestedRangeNotSatisfiable),
),
)
builder := response.New(w, r)
builder.WithStatus(http.StatusRequestedRangeNotSatisfiable)
builder.WithHeader("Content-Type", "text/html; charset=utf-8")
builder.WithHeader("Cache-Control", "no-cache, max-age=0, must-revalidate, no-store")
builder.WithHeader("Content-Range", contentRange)
builder.WithBody("Range Not Satisfiable")
builder.Write()
}
v2-2.2.6/internal/http/response/html/html_test.go 0000664 0000000 0000000 00000014720 14756465373 0022027 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package html // import "miniflux.app/v2/internal/http/response/html"
import (
"errors"
"net/http"
"net/http/httptest"
"testing"
)
func TestOKResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
OK(w, r, "Some HTML")
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusOK
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `Some HTML`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
headers := map[string]string{
"Content-Type": "text/html; charset=utf-8",
"Cache-Control": "no-cache, max-age=0, must-revalidate, no-store",
}
for header, expected := range headers {
actual := resp.Header.Get(header)
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
}
func TestServerErrorResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ServerError(w, r, errors.New("Some error"))
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusInternalServerError
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `Some error`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := "text/html; charset=utf-8"
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestBadRequestResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
BadRequest(w, r, errors.New("Some error"))
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusBadRequest
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `Some error`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := "text/html; charset=utf-8"
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestForbiddenResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
Forbidden(w, r)
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusForbidden
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `Access Forbidden`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := "text/html; charset=utf-8"
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestNotFoundResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
NotFound(w, r)
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusNotFound
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `Page Not Found`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := "text/html; charset=utf-8"
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestRedirectResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
Redirect(w, r, "/path")
})
handler.ServeHTTP(w, r)
resp := w.Result()
defer resp.Body.Close()
expectedStatusCode := http.StatusFound
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedResult := "/path"
actualResult := resp.Header.Get("Location")
if actualResult != expectedResult {
t.Fatalf(`Unexpected redirect location, got %q instead of %q`, actualResult, expectedResult)
}
}
func TestRequestedRangeNotSatisfiable(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
RequestedRangeNotSatisfiable(w, r, "bytes */12777")
})
handler.ServeHTTP(w, r)
resp := w.Result()
defer resp.Body.Close()
expectedStatusCode := http.StatusRequestedRangeNotSatisfiable
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedContentRangeHeader := "bytes */12777"
actualContentRangeHeader := resp.Header.Get("Content-Range")
if actualContentRangeHeader != expectedContentRangeHeader {
t.Fatalf(`Unexpected content range header, got %q instead of %q`, actualContentRangeHeader, expectedContentRangeHeader)
}
}
v2-2.2.6/internal/http/response/json/ 0000775 0000000 0000000 00000000000 14756465373 0017476 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/http/response/json/json.go 0000664 0000000 0000000 00000014446 14756465373 0021007 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package json // import "miniflux.app/v2/internal/http/response/json"
import (
"encoding/json"
"errors"
"log/slog"
"net/http"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response"
)
const contentTypeHeader = `application/json`
// OK creates a new JSON response with a 200 status code.
func OK(w http.ResponseWriter, r *http.Request, body any) {
responseBody, err := json.Marshal(body)
if err != nil {
ServerError(w, r, err)
return
}
builder := response.New(w, r)
builder.WithHeader("Content-Type", contentTypeHeader)
builder.WithBody(responseBody)
builder.Write()
}
// Created sends a created response to the client.
func Created(w http.ResponseWriter, r *http.Request, body any) {
responseBody, err := json.Marshal(body)
if err != nil {
ServerError(w, r, err)
return
}
builder := response.New(w, r)
builder.WithStatus(http.StatusCreated)
builder.WithHeader("Content-Type", contentTypeHeader)
builder.WithBody(responseBody)
builder.Write()
}
// NoContent sends a no content response to the client.
func NoContent(w http.ResponseWriter, r *http.Request) {
builder := response.New(w, r)
builder.WithStatus(http.StatusNoContent)
builder.WithHeader("Content-Type", contentTypeHeader)
builder.Write()
}
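// Accepted sends an accepted response to the client.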
func Accepted(w http.ResponseWriter, r *http.Request) {
builder := response.New(w, r)
builder.WithStatus(http.StatusAccepted)
builder.WithHeader("Content-Type", contentTypeHeader)
builder.Write()
}
// ServerError sends an internal error to the client.
func ServerError(w http.ResponseWriter, r *http.Request, err error) {
slog.Error(http.StatusText(http.StatusInternalServerError),
slog.Any("error", err),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusInternalServerError),
),
)
responseBody, jsonErr := generateJSONError(err)
if jsonErr != nil {
slog.Error("Unable to generate JSON error", slog.Any("error", jsonErr))
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
builder := response.New(w, r)
builder.WithStatus(http.StatusInternalServerError)
builder.WithHeader("Content-Type", contentTypeHeader)
builder.WithBody(responseBody)
builder.Write()
}
// BadRequest sends a bad request error to the client.
func BadRequest(w http.ResponseWriter, r *http.Request, err error) {
slog.Warn(http.StatusText(http.StatusBadRequest),
slog.Any("error", err),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusBadRequest),
),
)
responseBody, jsonErr := generateJSONError(err)
if jsonErr != nil {
slog.Error("Unable to generate JSON error", slog.Any("error", jsonErr))
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
builder := response.New(w, r)
builder.WithStatus(http.StatusBadRequest)
builder.WithHeader("Content-Type", contentTypeHeader)
builder.WithBody(responseBody)
builder.Write()
}
// Unauthorized sends a not authorized error to the client.
func Unauthorized(w http.ResponseWriter, r *http.Request) {
slog.Warn(http.StatusText(http.StatusUnauthorized),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusUnauthorized),
),
)
responseBody, jsonErr := generateJSONError(errors.New("access unauthorized"))
if jsonErr != nil {
slog.Error("Unable to generate JSON error", slog.Any("error", jsonErr))
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
builder := response.New(w, r)
builder.WithStatus(http.StatusUnauthorized)
builder.WithHeader("Content-Type", contentTypeHeader)
builder.WithBody(responseBody)
builder.Write()
}
// Forbidden sends a forbidden error to the client.
func Forbidden(w http.ResponseWriter, r *http.Request) {
slog.Warn(http.StatusText(http.StatusForbidden),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusForbidden),
),
)
responseBody, jsonErr := generateJSONError(errors.New("access forbidden"))
if jsonErr != nil {
slog.Error("Unable to generate JSON error", slog.Any("error", jsonErr))
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
builder := response.New(w, r)
builder.WithStatus(http.StatusForbidden)
builder.WithHeader("Content-Type", contentTypeHeader)
builder.WithBody(responseBody)
builder.Write()
}
// NotFound sends a page not found error to the client.
func NotFound(w http.ResponseWriter, r *http.Request) {
slog.Warn(http.StatusText(http.StatusNotFound),
slog.String("client_ip", request.ClientIP(r)),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("user_agent", r.UserAgent()),
),
slog.Group("response",
slog.Int("status_code", http.StatusNotFound),
),
)
responseBody, jsonErr := generateJSONError(errors.New("resource not found"))
if jsonErr != nil {
slog.Error("Unable to generate JSON error", slog.Any("error", jsonErr))
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
builder := response.New(w, r)
builder.WithStatus(http.StatusNotFound)
builder.WithHeader("Content-Type", contentTypeHeader)
builder.WithBody(responseBody)
builder.Write()
}
func generateJSONError(err error) ([]byte, error) {
type errorMsg struct {
ErrorMessage string `json:"error_message"`
}
encodedBody, err := json.Marshal(errorMsg{ErrorMessage: err.Error()})
if err != nil {
return nil, err
}
return encodedBody, nil
}
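// For reference, the error payload produced above has the shape
// {"error_message":"..."}; this is also the format asserted by the tests in json_test.go.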
v2-2.2.6/internal/http/response/json/json_test.go 0000664 0000000 0000000 00000021157 14756465373 0022043 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package json // import "miniflux.app/v2/internal/http/response/json"
import (
"errors"
"net/http"
"net/http/httptest"
"testing"
)
func TestOKResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
OK(w, r, map[string]string{"key": "value"})
})
handler.ServeHTTP(w, r)
resp := w.Result()
defer resp.Body.Close()
expectedStatusCode := http.StatusOK
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `{"key":"value"}`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %q instead of %q`, actualBody, expectedBody)
}
expectedContentType := contentTypeHeader
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestCreatedResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
Created(w, r, map[string]string{"key": "value"})
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusCreated
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `{"key":"value"}`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := contentTypeHeader
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestNoContentResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
NoContent(w, r)
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusNoContent
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := ``
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := contentTypeHeader
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestServerErrorResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ServerError(w, r, errors.New("some error"))
})
handler.ServeHTTP(w, r)
resp := w.Result()
defer resp.Body.Close()
expectedStatusCode := http.StatusInternalServerError
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `{"error_message":"some error"}`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %q instead of %q`, actualBody, expectedBody)
}
expectedContentType := contentTypeHeader
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestBadRequestResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
BadRequest(w, r, errors.New("Some Error"))
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusBadRequest
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `{"error_message":"Some Error"}`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := contentTypeHeader
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestUnauthorizedResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
Unauthorized(w, r)
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusUnauthorized
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `{"error_message":"access unauthorized"}`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := contentTypeHeader
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestForbiddenResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
Forbidden(w, r)
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusForbidden
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `{"error_message":"access forbidden"}`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := contentTypeHeader
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestNotFoundResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
NotFound(w, r)
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusNotFound
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `{"error_message":"resource not found"}`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := contentTypeHeader
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestBuildInvalidJSONResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
OK(w, r, make(chan int))
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusInternalServerError
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `{"error_message":"json: unsupported type: chan int"}`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := contentTypeHeader
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
v2-2.2.6/internal/http/response/xml/ 0000775 0000000 0000000 00000000000 14756465373 0017325 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/http/response/xml/xml.go 0000664 0000000 0000000 00000001533 14756465373 0020456 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package xml // import "miniflux.app/v2/internal/http/response/xml"
import (
"net/http"
"miniflux.app/v2/internal/http/response"
)
// OK writes a standard XML response with a status 200 OK.
func OK(w http.ResponseWriter, r *http.Request, body interface{}) {
builder := response.New(w, r)
builder.WithHeader("Content-Type", "text/xml; charset=utf-8")
builder.WithBody(body)
builder.Write()
}
// Attachment forces the XML document to be downloaded by the web browser.
func Attachment(w http.ResponseWriter, r *http.Request, filename string, body interface{}) {
builder := response.New(w, r)
builder.WithHeader("Content-Type", "text/xml; charset=utf-8")
builder.WithAttachment(filename)
builder.WithBody(body)
builder.Write()
}
v2-2.2.6/internal/http/response/xml/xml_test.go 0000664 0000000 0000000 00000004167 14756465373 0021523 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package xml // import "miniflux.app/v2/internal/http/response/xml"
import (
"net/http"
"net/http/httptest"
"testing"
)
func TestOKResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
OK(w, r, "Some XML")
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusOK
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `Some XML`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
expectedContentType := "text/xml; charset=utf-8"
actualContentType := resp.Header.Get("Content-Type")
if actualContentType != expectedContentType {
t.Fatalf(`Unexpected content type, got %q instead of %q`, actualContentType, expectedContentType)
}
}
func TestAttachmentResponse(t *testing.T) {
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
Attachment(w, r, "file.xml", "Some XML")
})
handler.ServeHTTP(w, r)
resp := w.Result()
expectedStatusCode := http.StatusOK
if resp.StatusCode != expectedStatusCode {
t.Fatalf(`Unexpected status code, got %d instead of %d`, resp.StatusCode, expectedStatusCode)
}
expectedBody := `Some XML`
actualBody := w.Body.String()
if actualBody != expectedBody {
t.Fatalf(`Unexpected body, got %s instead of %s`, actualBody, expectedBody)
}
headers := map[string]string{
"Content-Type": "text/xml; charset=utf-8",
"Content-Disposition": "attachment; filename=file.xml",
}
for header, expected := range headers {
actual := resp.Header.Get(header)
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
}
v2-2.2.6/internal/http/route/ 0000775 0000000 0000000 00000000000 14756465373 0016025 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/http/route/route.go 0000664 0000000 0000000 00000001353 14756465373 0017514 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package route // import "miniflux.app/v2/internal/http/route"
import (
"strconv"
"github.com/gorilla/mux"
)
// Path returns the defined route based on given arguments.
func Path(router *mux.Router, name string, args ...any) string {
route := router.Get(name)
if route == nil {
panic("route not found: " + name)
}
var pairs []string
for _, arg := range args {
switch param := arg.(type) {
case string:
pairs = append(pairs, param)
case int64:
pairs = append(pairs, strconv.FormatInt(param, 10))
}
}
result, err := route.URLPath(pairs...)
if err != nil {
panic(err)
}
return result.String()
}
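// Usage sketch (illustrative; the route name and parameter name are placeholders
// and must match a route registered on the router):
//
//	uri := route.Path(router, "feedEntries", "feedID", int64(42))
//
// Arguments after the route name are consumed as name/value pairs by
// mux.Route.URLPath, so they must come in pairs; int64 values are formatted in
// base 10 and arguments of any other type are ignored.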
v2-2.2.6/internal/http/server/ 0000775 0000000 0000000 00000000000 14756465373 0016175 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/http/server/httpd.go 0000664 0000000 0000000 00000021637 14756465373 0017660 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package httpd // import "miniflux.app/v2/internal/http/server"
import (
"crypto/tls"
"fmt"
"log/slog"
"net"
"net/http"
"os"
"strconv"
"strings"
"time"
"miniflux.app/v2/internal/api"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/fever"
"miniflux.app/v2/internal/googlereader"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/ui"
"miniflux.app/v2/internal/version"
"miniflux.app/v2/internal/worker"
"github.com/gorilla/mux"
"github.com/prometheus/client_golang/prometheus/promhttp"
"golang.org/x/crypto/acme"
"golang.org/x/crypto/acme/autocert"
)
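// StartWebServer builds the HTTP handler, applies the configured timeouts, and
// starts the server in the background, choosing the listener based on the
// runtime options: a systemd socket, a Unix socket, automatic TLS via ACME,
// TLS with the provided certificate files, or plain HTTP. The *http.Server is
// returned so the caller can shut it down later.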
func StartWebServer(store *storage.Storage, pool *worker.Pool) *http.Server {
certFile := config.Opts.CertFile()
keyFile := config.Opts.CertKeyFile()
certDomain := config.Opts.CertDomain()
listenAddr := config.Opts.ListenAddr()
server := &http.Server{
ReadTimeout: time.Duration(config.Opts.HTTPServerTimeout()) * time.Second,
WriteTimeout: time.Duration(config.Opts.HTTPServerTimeout()) * time.Second,
IdleTimeout: time.Duration(config.Opts.HTTPServerTimeout()) * time.Second,
Handler: setupHandler(store, pool),
}
switch {
case os.Getenv("LISTEN_PID") == strconv.Itoa(os.Getpid()):
startSystemdSocketServer(server)
case strings.HasPrefix(listenAddr, "/"):
startUnixSocketServer(server, listenAddr)
case certDomain != "":
config.Opts.HTTPS = true
startAutoCertTLSServer(server, certDomain, store)
case certFile != "" && keyFile != "":
config.Opts.HTTPS = true
server.Addr = listenAddr
startTLSServer(server, certFile, keyFile)
default:
server.Addr = listenAddr
startHTTPServer(server)
}
return server
}
func startSystemdSocketServer(server *http.Server) {
go func() {
f := os.NewFile(3, "systemd socket")
listener, err := net.FileListener(f)
if err != nil {
printErrorAndExit(`Unable to create listener from systemd socket: %v`, err)
}
slog.Info(`Starting server using systemd socket`)
if err := server.Serve(listener); err != http.ErrServerClosed {
printErrorAndExit(`Server failed to start: %v`, err)
}
}()
}
func startUnixSocketServer(server *http.Server, socketFile string) {
os.Remove(socketFile)
go func(sock string) {
listener, err := net.Listen("unix", sock)
if err != nil {
printErrorAndExit(`Server failed to start: %v`, err)
}
defer listener.Close()
if err := os.Chmod(sock, 0666); err != nil {
printErrorAndExit(`Unable to change socket permission: %v`, err)
}
slog.Info("Starting server using a Unix socket", slog.String("socket", sock))
if err := server.Serve(listener); err != http.ErrServerClosed {
printErrorAndExit(`Server failed to start: %v`, err)
}
}(socketFile)
}
func tlsConfig() *tls.Config {
// See https://blog.cloudflare.com/exposing-go-on-the-internet/
// And https://wiki.mozilla.org/Security/Server_Side_TLS
return &tls.Config{
MinVersion: tls.VersionTLS12,
CurvePreferences: []tls.CurveID{
tls.CurveP256,
tls.X25519,
},
CipherSuites: []uint16{
tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,
tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,
tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
},
}
}
func startAutoCertTLSServer(server *http.Server, certDomain string, store *storage.Storage) {
server.Addr = ":https"
certManager := autocert.Manager{
Cache: storage.NewCertificateCache(store),
Prompt: autocert.AcceptTOS,
HostPolicy: autocert.HostWhitelist(certDomain),
}
server.TLSConfig = tlsConfig()
server.TLSConfig.GetCertificate = certManager.GetCertificate
server.TLSConfig.NextProtos = []string{"h2", "http/1.1", acme.ALPNProto}
// Handle http-01 challenge.
s := &http.Server{
Handler: certManager.HTTPHandler(nil),
Addr: ":http",
}
go s.ListenAndServe()
go func() {
slog.Info("Starting TLS server using automatic certificate management",
slog.String("listen_address", server.Addr),
slog.String("domain", certDomain),
)
if err := server.ListenAndServeTLS("", ""); err != http.ErrServerClosed {
printErrorAndExit(`Server failed to start: %v`, err)
}
}()
}
func startTLSServer(server *http.Server, certFile, keyFile string) {
server.TLSConfig = tlsConfig()
go func() {
slog.Info("Starting TLS server using a certificate",
slog.String("listen_address", server.Addr),
slog.String("cert_file", certFile),
slog.String("key_file", keyFile),
)
if err := server.ListenAndServeTLS(certFile, keyFile); err != http.ErrServerClosed {
printErrorAndExit(`Server failed to start: %v`, err)
}
}()
}
func startHTTPServer(server *http.Server) {
go func() {
slog.Info("Starting HTTP server",
slog.String("listen_address", server.Addr),
)
if err := server.ListenAndServe(); err != http.ErrServerClosed {
printErrorAndExit(`Server failed to start: %v`, err)
}
}()
}
func setupHandler(store *storage.Storage, pool *worker.Pool) *mux.Router {
router := mux.NewRouter()
if config.Opts.BasePath() != "" {
router = router.PathPrefix(config.Opts.BasePath()).Subrouter()
}
if config.Opts.HasMaintenanceMode() {
router.Use(func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(config.Opts.MaintenanceMessage()))
})
})
}
router.Use(middleware)
fever.Serve(router, store)
googlereader.Serve(router, store)
api.Serve(router, store, pool)
ui.Serve(router, store, pool)
router.HandleFunc("/healthcheck", func(w http.ResponseWriter, r *http.Request) {
if err := store.Ping(); err != nil {
http.Error(w, "Database Connection Error", http.StatusInternalServerError)
return
}
w.Write([]byte("OK"))
}).Name("healthcheck")
router.HandleFunc("/version", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(version.Version))
}).Name("version")
if config.Opts.HasMetricsCollector() {
router.Handle("/metrics", promhttp.Handler()).Name("metrics")
router.Use(func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
route := mux.CurrentRoute(r)
// Returns a 404 if the client is not authorized to access the metrics endpoint.
if route.GetName() == "metrics" && !isAllowedToAccessMetricsEndpoint(r) {
slog.Warn("Authentication failed while accessing the metrics endpoint",
slog.String("client_ip", request.ClientIP(r)),
slog.String("client_user_agent", r.UserAgent()),
slog.String("client_remote_addr", r.RemoteAddr),
)
http.NotFound(w, r)
return
}
next.ServeHTTP(w, r)
})
})
}
return router
}
func isAllowedToAccessMetricsEndpoint(r *http.Request) bool {
clientIP := request.ClientIP(r)
if config.Opts.MetricsUsername() != "" && config.Opts.MetricsPassword() != "" {
username, password, authOK := r.BasicAuth()
if !authOK {
slog.Warn("Metrics endpoint accessed without authentication header",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("client_user_agent", r.UserAgent()),
slog.String("client_remote_addr", r.RemoteAddr),
)
return false
}
if username == "" || password == "" {
slog.Warn("Metrics endpoint accessed with empty username or password",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("client_user_agent", r.UserAgent()),
slog.String("client_remote_addr", r.RemoteAddr),
)
return false
}
if username != config.Opts.MetricsUsername() || password != config.Opts.MetricsPassword() {
slog.Warn("Metrics endpoint accessed with invalid username or password",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("client_user_agent", r.UserAgent()),
slog.String("client_remote_addr", r.RemoteAddr),
)
return false
}
}
remoteIP := request.FindRemoteIP(r)
if remoteIP == "@" {
// This indicates a request sent via a Unix socket, always consider these trusted.
return true
}
for _, cidr := range config.Opts.MetricsAllowedNetworks() {
_, network, err := net.ParseCIDR(cidr)
if err != nil {
slog.Error("Metrics endpoint accessed with invalid CIDR",
slog.Bool("authentication_failed", true),
slog.String("client_ip", clientIP),
slog.String("client_user_agent", r.UserAgent()),
slog.String("client_remote_addr", r.RemoteAddr),
slog.String("cidr", cidr),
)
return false
}
// We use r.RemoteAddr in this case because HTTP headers like X-Forwarded-For can be easily spoofed.
// The recommendation is to use HTTP Basic authentication.
if network.Contains(net.ParseIP(remoteIP)) {
return true
}
}
return false
}
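// For example (addresses are placeholders): with MetricsAllowedNetworks
// returning []string{"192.168.0.0/24"}, a request whose remote address is
// 192.168.0.10 passes the network check, while one coming from 10.0.0.1 is
// rejected even when Basic authentication succeeded, because the network check
// applies in addition to the optional credentials check.
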
func printErrorAndExit(format string, a ...any) {
message := fmt.Sprintf(format, a...)
slog.Error(message)
fmt.Fprintf(os.Stderr, "%v\n", message)
os.Exit(1)
}
v2-2.2.6/internal/http/server/middleware.go 0000664 0000000 0000000 00000002171 14756465373 0020642 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package httpd // import "miniflux.app/v2/internal/http/server"
import (
"context"
"log/slog"
"net/http"
"time"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/http/request"
)
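// middleware stores the resolved client IP in the request context, marks the
// configuration as HTTPS when the request was forwarded over TLS, logs every
// request with its execution time, and sets the HSTS header when enabled.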
func middleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
clientIP := request.FindClientIP(r)
ctx := r.Context()
ctx = context.WithValue(ctx, request.ClientIPContextKey, clientIP)
if r.Header.Get("X-Forwarded-Proto") == "https" {
config.Opts.HTTPS = true
}
t1 := time.Now()
defer func() {
slog.Debug("Incoming request",
slog.String("client_ip", clientIP),
slog.Group("request",
slog.String("method", r.Method),
slog.String("uri", r.RequestURI),
slog.String("protocol", r.Proto),
slog.Duration("execution_time", time.Since(t1)),
),
)
}()
if config.Opts.HTTPS && config.Opts.HasHSTS() {
w.Header().Set("Strict-Transport-Security", "max-age=31536000")
}
next.ServeHTTP(w, r.WithContext(ctx))
})
}
v2-2.2.6/internal/integration/ 0000775 0000000 0000000 00000000000 14756465373 0016233 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/apprise/ 0000775 0000000 0000000 00000000000 14756465373 0017676 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/apprise/apprise.go 0000664 0000000 0000000 00000004137 14756465373 0021675 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package apprise
import (
"bytes"
"encoding/json"
"fmt"
"log/slog"
"net/http"
"time"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/urllib"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
type Client struct {
servicesURL string
baseURL string
}
func NewClient(serviceURL, baseURL string) *Client {
return &Client{serviceURL, baseURL}
}
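// SendNotification sends one notification per entry to the Apprise API server's
// /notify endpoint, passing the configured service URLs in the "urls" field
// together with the feed title and a Markdown link to the entry.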
func (c *Client) SendNotification(feed *model.Feed, entries model.Entries) error {
if c.baseURL == "" || c.servicesURL == "" {
return fmt.Errorf("apprise: missing base URL or services URL")
}
for _, entry := range entries {
message := "[" + entry.Title + "]" + "(" + entry.URL + ")" + "\n\n"
apiEndpoint, err := urllib.JoinBaseURLAndPath(c.baseURL, "/notify")
if err != nil {
return fmt.Errorf(`apprise: invalid API endpoint: %v`, err)
}
requestBody, err := json.Marshal(map[string]any{
"urls": c.servicesURL,
"body": message,
"title": feed.Title,
})
if err != nil {
return fmt.Errorf("apprise: unable to encode request body: %v", err)
}
request, err := http.NewRequest(http.MethodPost, apiEndpoint, bytes.NewReader(requestBody))
if err != nil {
return fmt.Errorf("apprise: unable to create request: %v", err)
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
slog.Debug("Sending Apprise notification",
slog.String("apprise_url", c.baseURL),
slog.String("services_url", c.servicesURL),
slog.String("title", feed.Title),
slog.String("body", message),
slog.String("entry_url", entry.URL),
)
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return fmt.Errorf("apprise: unable to send request: %v", err)
}
response.Body.Close()
if response.StatusCode >= 400 {
return fmt.Errorf("apprise: unable to send a notification: url=%s status=%d", apiEndpoint, response.StatusCode)
}
}
return nil
}
v2-2.2.6/internal/integration/betula/ 0000775 0000000 0000000 00000000000 14756465373 0017507 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/betula/betula.go 0000664 0000000 0000000 00000002701 14756465373 0021312 0 ustar 00root root 0000000 0000000 package betula
import (
"fmt"
"net/http"
"net/url"
"strings"
"time"
"miniflux.app/v2/internal/urllib"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
type Client struct {
url string
token string
}
func NewClient(url, token string) *Client {
return &Client{url: url, token: token}
}
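// CreateBookmark saves the given URL in Betula by posting to its /save-link
// endpoint, authenticating with the betula-token cookie and joining the tags
// with commas.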
func (c *Client) CreateBookmark(entryURL, entryTitle string, tags []string) error {
apiEndpoint, err := urllib.JoinBaseURLAndPath(c.url, "/save-link")
if err != nil {
return fmt.Errorf("betula: unable to generate save-link endpoint: %v", err)
}
values := url.Values{}
values.Add("url", entryURL)
values.Add("title", entryTitle)
values.Add("tags", strings.Join(tags, ","))
request, err := http.NewRequest(http.MethodPost, apiEndpoint+"?"+values.Encode(), nil)
if err != nil {
return fmt.Errorf("betula: unable to create request: %v", err)
}
request.Header.Set("Content-Type", "application/x-www-form-urlencoded")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
request.AddCookie(&http.Cookie{Name: "betula-token", Value: c.token})
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return fmt.Errorf("betula: unable to send request: %v", err)
}
defer response.Body.Close()
if response.StatusCode >= 400 {
return fmt.Errorf("betula: unable to create bookmark: url=%s status=%d", apiEndpoint, response.StatusCode)
}
return nil
}
v2-2.2.6/internal/integration/cubox/ 0000775 0000000 0000000 00000000000 14756465373 0017353 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/cubox/cubox.go 0000664 0000000 0000000 00000003131 14756465373 0021020 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
// Cubox API documentation: https://help.cubox.cc/save/api/
package cubox // import "miniflux.app/v2/internal/integration/cubox"
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"time"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
type Client struct {
apiLink string
}
func NewClient(apiLink string) *Client {
return &Client{apiLink: apiLink}
}
func (c *Client) SaveLink(entryURL string) error {
if c.apiLink == "" {
return errors.New("cubox: missing API link")
}
requestBody, err := json.Marshal(&card{
Type: "url",
Content: entryURL,
})
if err != nil {
return fmt.Errorf("cubox: unable to encode request body: %w", err)
}
ctx, cancel := context.WithTimeout(context.Background(), defaultClientTimeout)
defer cancel()
request, err := http.NewRequestWithContext(ctx, http.MethodPost, c.apiLink, bytes.NewReader(requestBody))
if err != nil {
return fmt.Errorf("cubox: unable to create request: %w", err)
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
response, err := http.DefaultClient.Do(request)
if err != nil {
return fmt.Errorf("cubox: unable to send request: %w", err)
}
defer response.Body.Close()
if response.StatusCode != 200 {
return fmt.Errorf("cubox: unable to save link: status=%d", response.StatusCode)
}
return nil
}
type card struct {
Type string `json:"type"`
Content string `json:"content"`
}
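// A serialized card looks like {"type":"url","content":"https://example.org/article"}
// (the URL shown is a placeholder); this JSON body is what SaveLink posts to the
// user-provided API link.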
v2-2.2.6/internal/integration/discord/ 0000775 0000000 0000000 00000000000 14756465373 0017662 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/discord/discord.go 0000664 0000000 0000000 00000005221 14756465373 0021640 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
// Discord Webhooks documentation: https://discord.com/developers/docs/resources/webhook
package discord // import "miniflux.app/v2/internal/integration/discord"
import (
"bytes"
"encoding/json"
"fmt"
"log/slog"
"net/http"
"time"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/urllib"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
const discordMsgColor = 5793266
type Client struct {
webhookURL string
}
func NewClient(webhookURL string) *Client {
return &Client{webhookURL: webhookURL}
}
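// SendDiscordMsg posts one embed per entry to the configured webhook URL, with
// fields for the updated feed, the article link, the author, and the source website.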
func (c *Client) SendDiscordMsg(feed *model.Feed, entries model.Entries) error {
for _, entry := range entries {
requestBody, err := json.Marshal(&discordMessage{
Embeds: []discordEmbed{
{
Title: "RSS feed update from Miniflux",
Color: discordMsgColor,
Fields: []discordFields{
{
Name: "Updated feed",
Value: feed.Title,
},
{
Name: "Article link",
Value: "[" + entry.Title + "]" + "(" + entry.URL + ")",
},
{
Name: "Author",
Value: entry.Author,
Inline: true,
},
{
Name: "Source website",
Value: urllib.RootURL(feed.SiteURL),
Inline: true,
},
},
},
},
})
if err != nil {
return fmt.Errorf("discord: unable to encode request body: %v", err)
}
request, err := http.NewRequest(http.MethodPost, c.webhookURL, bytes.NewReader(requestBody))
if err != nil {
return fmt.Errorf("discord: unable to create request: %v", err)
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
slog.Debug("Sending Discord notification",
slog.String("webhookURL", c.webhookURL),
slog.String("title", feed.Title),
slog.String("entry_url", entry.URL),
)
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return fmt.Errorf("discord: unable to send request: %v", err)
}
response.Body.Close()
if response.StatusCode >= 400 {
return fmt.Errorf("discord: unable to send a notification: url=%s status=%d", c.webhookURL, response.StatusCode)
}
}
return nil
}
type discordFields struct {
Name string `json:"name"`
Value string `json:"value"`
Inline bool `json:"inline,omitempty"`
}
type discordEmbed struct {
Title string `json:"title"`
Color int `json:"color"`
Fields []discordFields `json:"fields"`
}
type discordMessage struct {
Embeds []discordEmbed `json:"embeds"`
}
v2-2.2.6/internal/integration/espial/ 0000775 0000000 0000000 00000000000 14756465373 0017510 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/espial/espial.go 0000664 0000000 0000000 00000004122 14756465373 0021313 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package espial // import "miniflux.app/v2/internal/integration/espial"
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"time"
"miniflux.app/v2/internal/urllib"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
type Client struct {
baseURL string
apiKey string
}
func NewClient(baseURL, apiKey string) *Client {
return &Client{baseURL: baseURL, apiKey: apiKey}
}
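// CreateLink saves a bookmark in Espial by posting to /api/add with the
// configured API key; the new link is flagged as "to read".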
func (c *Client) CreateLink(entryURL, entryTitle, espialTags string) error {
if c.baseURL == "" || c.apiKey == "" {
return fmt.Errorf("espial: missing base URL or API key")
}
apiEndpoint, err := urllib.JoinBaseURLAndPath(c.baseURL, "/api/add")
if err != nil {
return fmt.Errorf("espial: invalid API endpoint: %v", err)
}
requestBody, err := json.Marshal(&espialDocument{
Title: entryTitle,
Url: entryURL,
ToRead: true,
Tags: espialTags,
})
if err != nil {
return fmt.Errorf("espial: unable to encode request body: %v", err)
}
request, err := http.NewRequest(http.MethodPost, apiEndpoint, bytes.NewReader(requestBody))
if err != nil {
return fmt.Errorf("espial: unable to create request: %v", err)
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
request.Header.Set("Authorization", "ApiKey "+c.apiKey)
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return fmt.Errorf("espial: unable to send request: %v", err)
}
defer response.Body.Close()
if response.StatusCode != http.StatusCreated {
responseBody := new(bytes.Buffer)
responseBody.ReadFrom(response.Body)
return fmt.Errorf("espial: unable to create link: url=%s status=%d body=%s", apiEndpoint, response.StatusCode, responseBody.String())
}
return nil
}
type espialDocument struct {
Title string `json:"title,omitempty"`
Url string `json:"url,omitempty"`
ToRead bool `json:"toread,omitempty"`
Tags string `json:"tags,omitempty"`
}
v2-2.2.6/internal/integration/instapaper/ 0000775 0000000 0000000 00000000000 14756465373 0020401 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/instapaper/instapaper.go 0000664 0000000 0000000 00000003032 14756465373 0023074 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package instapaper // import "miniflux.app/v2/internal/integration/instapaper"
import (
"fmt"
"net/http"
"net/url"
"time"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
type Client struct {
username string
password string
}
func NewClient(username, password string) *Client {
return &Client{username: username, password: password}
}
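// AddURL saves the given URL in Instapaper through its /api/add endpoint using
// HTTP Basic authentication; a 201 Created response indicates success.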
func (c *Client) AddURL(entryURL, entryTitle string) error {
if c.username == "" || c.password == "" {
return fmt.Errorf("instapaper: missing username or password")
}
values := url.Values{}
values.Add("url", entryURL)
values.Add("title", entryTitle)
apiEndpoint := "https://www.instapaper.com/api/add?" + values.Encode()
request, err := http.NewRequest(http.MethodGet, apiEndpoint, nil)
if err != nil {
return fmt.Errorf("instapaper: unable to create request: %v", err)
}
request.SetBasicAuth(c.username, c.password)
request.Header.Set("Content-Type", "application/x-www-form-urlencoded")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return fmt.Errorf("instapaper: unable to send request: %v", err)
}
defer response.Body.Close()
if response.StatusCode != http.StatusCreated {
return fmt.Errorf("instapaper: unable to add URL: url=%s status=%d", apiEndpoint, response.StatusCode)
}
return nil
}
v2-2.2.6/internal/integration/integration.go 0000664 0000000 0000000 00000045551 14756465373 0021117 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package integration // import "miniflux.app/v2/internal/integration"
import (
"log/slog"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/integration/apprise"
"miniflux.app/v2/internal/integration/betula"
"miniflux.app/v2/internal/integration/cubox"
"miniflux.app/v2/internal/integration/discord"
"miniflux.app/v2/internal/integration/espial"
"miniflux.app/v2/internal/integration/instapaper"
"miniflux.app/v2/internal/integration/linkace"
"miniflux.app/v2/internal/integration/linkding"
"miniflux.app/v2/internal/integration/linkwarden"
"miniflux.app/v2/internal/integration/matrixbot"
"miniflux.app/v2/internal/integration/notion"
"miniflux.app/v2/internal/integration/ntfy"
"miniflux.app/v2/internal/integration/nunuxkeeper"
"miniflux.app/v2/internal/integration/omnivore"
"miniflux.app/v2/internal/integration/pinboard"
"miniflux.app/v2/internal/integration/pocket"
"miniflux.app/v2/internal/integration/pushover"
"miniflux.app/v2/internal/integration/raindrop"
"miniflux.app/v2/internal/integration/readeck"
"miniflux.app/v2/internal/integration/readwise"
"miniflux.app/v2/internal/integration/shaarli"
"miniflux.app/v2/internal/integration/shiori"
"miniflux.app/v2/internal/integration/slack"
"miniflux.app/v2/internal/integration/telegrambot"
"miniflux.app/v2/internal/integration/wallabag"
"miniflux.app/v2/internal/integration/webhook"
"miniflux.app/v2/internal/model"
)
// SendEntry sends the entry to third-party providers when the user clicks on "Save".
func SendEntry(entry *model.Entry, userIntegrations *model.Integration) {
if userIntegrations.BetulaEnabled {
slog.Debug("Sending entry to Betula",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := betula.NewClient(userIntegrations.BetulaURL, userIntegrations.BetulaToken)
err := client.CreateBookmark(
entry.URL,
entry.Title,
entry.Tags,
)
if err != nil {
slog.Error("Unable to send entry to Betula",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.PinboardEnabled {
slog.Debug("Sending entry to Pinboard",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := pinboard.NewClient(userIntegrations.PinboardToken)
err := client.CreateBookmark(
entry.URL,
entry.Title,
userIntegrations.PinboardTags,
userIntegrations.PinboardMarkAsUnread,
)
if err != nil {
slog.Error("Unable to send entry to Pinboard",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.InstapaperEnabled {
slog.Debug("Sending entry to Instapaper",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := instapaper.NewClient(userIntegrations.InstapaperUsername, userIntegrations.InstapaperPassword)
if err := client.AddURL(entry.URL, entry.Title); err != nil {
slog.Error("Unable to send entry to Instapaper",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.WallabagEnabled {
slog.Debug("Sending entry to Wallabag",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := wallabag.NewClient(
userIntegrations.WallabagURL,
userIntegrations.WallabagClientID,
userIntegrations.WallabagClientSecret,
userIntegrations.WallabagUsername,
userIntegrations.WallabagPassword,
userIntegrations.WallabagOnlyURL,
)
if err := client.CreateEntry(entry.URL, entry.Title, entry.Content); err != nil {
slog.Error("Unable to send entry to Wallabag",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.NotionEnabled {
slog.Debug("Sending entry to Notion",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := notion.NewClient(
userIntegrations.NotionToken,
userIntegrations.NotionPageID,
)
if err := client.UpdateDocument(entry.URL, entry.Title); err != nil {
slog.Error("Unable to send entry to Notion",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.NunuxKeeperEnabled {
slog.Debug("Sending entry to NunuxKeeper",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := nunuxkeeper.NewClient(
userIntegrations.NunuxKeeperURL,
userIntegrations.NunuxKeeperAPIKey,
)
if err := client.AddEntry(entry.URL, entry.Title, entry.Content); err != nil {
slog.Error("Unable to send entry to NunuxKeeper",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.EspialEnabled {
slog.Debug("Sending entry to Espial",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := espial.NewClient(
userIntegrations.EspialURL,
userIntegrations.EspialAPIKey,
)
if err := client.CreateLink(entry.URL, entry.Title, userIntegrations.EspialTags); err != nil {
slog.Error("Unable to send entry to Espial",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.PocketEnabled {
slog.Debug("Sending entry to Pocket",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := pocket.NewClient(config.Opts.PocketConsumerKey(userIntegrations.PocketConsumerKey), userIntegrations.PocketAccessToken)
if err := client.AddURL(entry.URL, entry.Title); err != nil {
slog.Error("Unable to send entry to Pocket",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.LinkAceEnabled {
slog.Debug("Sending entry to LinkAce",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := linkace.NewClient(
userIntegrations.LinkAceURL,
userIntegrations.LinkAceAPIKey,
userIntegrations.LinkAceTags,
userIntegrations.LinkAcePrivate,
userIntegrations.LinkAceCheckDisabled,
)
if err := client.AddURL(entry.URL, entry.Title); err != nil {
slog.Error("Unable to send entry to LinkAce",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.LinkdingEnabled {
slog.Debug("Sending entry to Linkding",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := linkding.NewClient(
userIntegrations.LinkdingURL,
userIntegrations.LinkdingAPIKey,
userIntegrations.LinkdingTags,
userIntegrations.LinkdingMarkAsUnread,
)
if err := client.CreateBookmark(entry.URL, entry.Title); err != nil {
slog.Error("Unable to send entry to Linkding",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.LinkwardenEnabled {
slog.Debug("Sending entry to linkwarden",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := linkwarden.NewClient(
userIntegrations.LinkwardenURL,
userIntegrations.LinkwardenAPIKey,
)
if err := client.CreateBookmark(entry.URL, entry.Title); err != nil {
slog.Error("Unable to send entry to Linkwarden",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.ReadeckEnabled {
slog.Debug("Sending entry to Readeck",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := readeck.NewClient(
userIntegrations.ReadeckURL,
userIntegrations.ReadeckAPIKey,
userIntegrations.ReadeckLabels,
userIntegrations.ReadeckOnlyURL,
)
if err := client.CreateBookmark(entry.URL, entry.Title, entry.Content); err != nil {
slog.Error("Unable to send entry to Readeck",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.ReadwiseEnabled {
slog.Debug("Sending entry to Readwise",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := readwise.NewClient(
userIntegrations.ReadwiseAPIKey,
)
if err := client.CreateDocument(entry.URL); err != nil {
slog.Error("Unable to send entry to Readwise",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.CuboxEnabled {
slog.Debug("Sending entry to Cubox",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := cubox.NewClient(userIntegrations.CuboxAPILink)
if err := client.SaveLink(entry.URL); err != nil {
slog.Error("Unable to send entry to Cubox",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.ShioriEnabled {
slog.Debug("Sending entry to Shiori",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := shiori.NewClient(
userIntegrations.ShioriURL,
userIntegrations.ShioriUsername,
userIntegrations.ShioriPassword,
)
if err := client.CreateBookmark(entry.URL, entry.Title); err != nil {
slog.Error("Unable to send entry to Shiori",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.ShaarliEnabled {
slog.Debug("Sending entry to Shaarli",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := shaarli.NewClient(
userIntegrations.ShaarliURL,
userIntegrations.ShaarliAPISecret,
)
if err := client.CreateLink(entry.URL, entry.Title); err != nil {
slog.Error("Unable to send entry to Shaarli",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.WebhookEnabled {
slog.Debug("Sending entry to Webhook",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.String("webhook_url", userIntegrations.WebhookURL),
)
webhookClient := webhook.NewClient(userIntegrations.WebhookURL, userIntegrations.WebhookSecret)
if err := webhookClient.SendSaveEntryWebhookEvent(entry); err != nil {
slog.Error("Unable to send entry to Webhook",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.String("webhook_url", userIntegrations.WebhookURL),
slog.Any("error", err),
)
}
}
if userIntegrations.OmnivoreEnabled {
slog.Debug("Sending entry to Omnivore",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := omnivore.NewClient(userIntegrations.OmnivoreAPIKey, userIntegrations.OmnivoreURL)
if err := client.SaveUrl(entry.URL); err != nil {
slog.Error("Unable to send entry to Omnivore",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
if userIntegrations.RaindropEnabled {
slog.Debug("Sending entry to Raindrop",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
client := raindrop.NewClient(userIntegrations.RaindropToken, userIntegrations.RaindropCollectionID, userIntegrations.RaindropTags)
if err := client.CreateRaindrop(entry.URL, entry.Title); err != nil {
slog.Error("Unable to send entry to Raindrop",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
}
// PushEntries pushes a list of entries to activated third-party providers during feed refreshes.
func PushEntries(feed *model.Feed, entries model.Entries, userIntegrations *model.Integration) {
if userIntegrations.MatrixBotEnabled {
slog.Debug("Sending new entries to Matrix",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int("nb_entries", len(entries)),
slog.Int64("feed_id", feed.ID),
)
err := matrixbot.PushEntries(
feed,
entries,
userIntegrations.MatrixBotURL,
userIntegrations.MatrixBotUser,
userIntegrations.MatrixBotPassword,
userIntegrations.MatrixBotChatID,
)
if err != nil {
slog.Error("Unable to send new entries to Matrix",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int("nb_entries", len(entries)),
slog.Int64("feed_id", feed.ID),
slog.Any("error", err),
)
}
}
if userIntegrations.WebhookEnabled {
var webhookURL string
if feed.WebhookURL != "" {
webhookURL = feed.WebhookURL
} else {
webhookURL = userIntegrations.WebhookURL
}
slog.Debug("Sending new entries to Webhook",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int("nb_entries", len(entries)),
slog.Int64("feed_id", feed.ID),
slog.String("webhook_url", webhookURL),
)
webhookClient := webhook.NewClient(webhookURL, userIntegrations.WebhookSecret)
if err := webhookClient.SendNewEntriesWebhookEvent(feed, entries); err != nil {
slog.Debug("Unable to send new entries to Webhook",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int("nb_entries", len(entries)),
slog.Int64("feed_id", feed.ID),
slog.String("webhook_url", webhookURL),
slog.Any("error", err),
)
}
}
if userIntegrations.NtfyEnabled && feed.NtfyEnabled {
slog.Debug("Sending new entries to Ntfy",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int("nb_entries", len(entries)),
slog.Int64("feed_id", feed.ID),
)
client := ntfy.NewClient(
userIntegrations.NtfyURL,
userIntegrations.NtfyTopic,
userIntegrations.NtfyAPIToken,
userIntegrations.NtfyUsername,
userIntegrations.NtfyPassword,
userIntegrations.NtfyIconURL,
userIntegrations.NtfyInternalLinks,
feed.NtfyPriority,
)
if err := client.SendMessages(feed, entries); err != nil {
slog.Warn("Unable to send new entries to Ntfy", slog.Any("error", err))
}
}
if userIntegrations.AppriseEnabled {
slog.Debug("Sending new entries to Apprise",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int("nb_entries", len(entries)),
slog.Int64("feed_id", feed.ID),
)
appriseServiceURLs := userIntegrations.AppriseServicesURL
if feed.AppriseServiceURLs != "" {
appriseServiceURLs = feed.AppriseServiceURLs
}
client := apprise.NewClient(
appriseServiceURLs,
userIntegrations.AppriseURL,
)
if err := client.SendNotification(feed, entries); err != nil {
slog.Warn("Unable to send new entries to Apprise", slog.Any("error", err))
}
}
if userIntegrations.DiscordEnabled {
slog.Debug("Sending new entries to Discord",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int("nb_entries", len(entries)),
slog.Int64("feed_id", feed.ID),
)
client := discord.NewClient(
userIntegrations.DiscordWebhookLink,
)
if err := client.SendDiscordMsg(feed, entries); err != nil {
slog.Warn("Unable to send new entries to Discord", slog.Any("error", err))
}
}
if userIntegrations.SlackEnabled {
slog.Debug("Sending new entries to Slack",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int("nb_entries", len(entries)),
slog.Int64("feed_id", feed.ID),
)
client := slack.NewClient(
userIntegrations.SlackWebhookLink,
)
if err := client.SendSlackMsg(feed, entries); err != nil {
slog.Warn("Unable to send new entries to Slack", slog.Any("error", err))
}
}
if userIntegrations.PushoverEnabled && feed.PushoverEnabled {
slog.Debug("Sending new entries to Pushover",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int("nb_entries", len(entries)),
slog.Int64("feed_id", feed.ID),
)
client := pushover.New(
userIntegrations.PushoverUser,
userIntegrations.PushoverToken,
feed.PushoverPriority,
userIntegrations.PushoverDevice,
userIntegrations.PushoverPrefix,
)
if err := client.SendMessages(feed, entries); err != nil {
slog.Warn("Unable to send new entries to Pushover", slog.Any("error", err))
}
}
// Integrations that only support sending individual entries
if userIntegrations.TelegramBotEnabled {
for _, entry := range entries {
if userIntegrations.TelegramBotEnabled {
slog.Debug("Sending a new entry to Telegram",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
)
if err := telegrambot.PushEntry(
feed,
entry,
userIntegrations.TelegramBotToken,
userIntegrations.TelegramBotChatID,
userIntegrations.TelegramBotTopicID,
userIntegrations.TelegramBotDisableWebPagePreview,
userIntegrations.TelegramBotDisableNotification,
userIntegrations.TelegramBotDisableButtons,
); err != nil {
slog.Error("Unable to send entry to Telegram",
slog.Int64("user_id", userIntegrations.UserID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Any("error", err),
)
}
}
}
}
}
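// Editorial usage sketch, not part of the upstream file: illustrates the two entry
// points of this package. The arguments are placeholders normally supplied by the
// storage layer and the feed refresh worker.
func exampleDispatch(feed *model.Feed, newEntries model.Entries, savedEntry *model.Entry, userIntegrations *model.Integration) {
	// Called when the user clicks "Save" on a single entry.
	SendEntry(savedEntry, userIntegrations)
	// Called after a feed refresh with the newly created entries.
	PushEntries(feed, newEntries, userIntegrations)
}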
v2-2.2.6/internal/integration/linkace/ 0000775 0000000 0000000 00000000000 14756465373 0017641 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/linkace/linkace.go 0000664 0000000 0000000 00000004421 14756465373 0021577 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package linkace // import "miniflux.app/v2/internal/integration/linkace"
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"strings"
"time"
"miniflux.app/v2/internal/urllib"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
type Client struct {
baseURL string
apiKey string
tags string
private bool
checkDisabled bool
}
func NewClient(baseURL, apiKey, tags string, private bool, checkDisabled bool) *Client {
return &Client{baseURL: baseURL, apiKey: apiKey, tags: tags, private: private, checkDisabled: checkDisabled}
}
func (c *Client) AddURL(entryURL, entryTitle string) error {
if c.baseURL == "" || c.apiKey == "" {
return fmt.Errorf("linkace: missing base URL or API key")
}
tagsSplitFn := func(c rune) bool {
return c == ',' || c == ' '
}
apiEndpoint, err := urllib.JoinBaseURLAndPath(c.baseURL, "/api/v2/links")
if err != nil {
return fmt.Errorf("linkace: invalid API endpoint: %v", err)
}
requestBody, err := json.Marshal(&createItemRequest{
Url: entryURL,
Title: entryTitle,
Tags: strings.FieldsFunc(c.tags, tagsSplitFn),
Private: c.private,
CheckDisabled: c.checkDisabled,
})
if err != nil {
return fmt.Errorf("linkace: unable to encode request body: %v", err)
}
request, err := http.NewRequest(http.MethodPost, apiEndpoint, bytes.NewReader(requestBody))
if err != nil {
return fmt.Errorf("linkace: unable to create request: %v", err)
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("Accept", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
request.Header.Set("Authorization", "Bearer "+c.apiKey)
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return fmt.Errorf("linkace: unable to send request: %v", err)
}
defer response.Body.Close()
if response.StatusCode >= 400 {
return fmt.Errorf("linkace: unable to create item: url=%s status=%d", apiEndpoint, response.StatusCode)
}
return nil
}
type createItemRequest struct {
Title string `json:"title,omitempty"`
Url string `json:"url"`
Tags []string `json:"tags,omitempty"`
Private bool `json:"is_private,omitempty"`
CheckDisabled bool `json:"check_disabled,omitempty"`
}
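// Editorial sketch, not part of the upstream file: demonstrates how the comma/space
// splitting used above turns the configured tag string into a slice. The input string
// is a made-up example.
func exampleTagSplitting() []string {
	splitFn := func(c rune) bool { return c == ',' || c == ' ' }
	// strings.FieldsFunc drops empty fields, so "go, rss  miniflux" yields ["go" "rss" "miniflux"].
	return strings.FieldsFunc("go, rss  miniflux", splitFn)
}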
v2-2.2.6/internal/integration/linkding/ 0000775 0000000 0000000 00000000000 14756465373 0020032 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/linkding/linkding.go 0000664 0000000 0000000 00000004312 14756465373 0022160 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package linkding // import "miniflux.app/v2/internal/integration/linkding"
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"strings"
"time"
"miniflux.app/v2/internal/urllib"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
type Client struct {
baseURL string
apiKey string
tags string
unread bool
}
func NewClient(baseURL, apiKey, tags string, unread bool) *Client {
return &Client{baseURL: baseURL, apiKey: apiKey, tags: tags, unread: unread}
}
func (c *Client) CreateBookmark(entryURL, entryTitle string) error {
if c.baseURL == "" || c.apiKey == "" {
return fmt.Errorf("linkding: missing base URL or API key")
}
tagsSplitFn := func(c rune) bool {
return c == ',' || c == ' '
}
apiEndpoint, err := urllib.JoinBaseURLAndPath(c.baseURL, "/api/bookmarks/")
if err != nil {
return fmt.Errorf(`linkding: invalid API endpoint: %v`, err)
}
requestBody, err := json.Marshal(&linkdingBookmark{
Url: entryURL,
Title: entryTitle,
TagNames: strings.FieldsFunc(c.tags, tagsSplitFn),
Unread: c.unread,
})
if err != nil {
return fmt.Errorf("linkding: unable to encode request body: %v", err)
}
request, err := http.NewRequest(http.MethodPost, apiEndpoint, bytes.NewReader(requestBody))
if err != nil {
return fmt.Errorf("linkding: unable to create request: %v", err)
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
request.Header.Set("Authorization", "Token "+c.apiKey)
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return fmt.Errorf("linkding: unable to send request: %v", err)
}
defer response.Body.Close()
if response.StatusCode >= 400 {
return fmt.Errorf("linkding: unable to create bookmark: url=%s status=%d", apiEndpoint, response.StatusCode)
}
return nil
}
type linkdingBookmark struct {
Url string `json:"url,omitempty"`
Title string `json:"title,omitempty"`
TagNames []string `json:"tag_names,omitempty"`
Unread bool `json:"unread,omitempty"`
}
v2-2.2.6/internal/integration/linkwarden/ 0000775 0000000 0000000 00000000000 14756465373 0020371 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/linkwarden/linkwarden.go 0000664 0000000 0000000 00000004416 14756465373 0023063 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package linkwarden // import "miniflux.app/v2/internal/integration/linkwarden"
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"time"
"miniflux.app/v2/internal/urllib"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
type Client struct {
baseURL string
apiKey string
}
func NewClient(baseURL, apiKey string) *Client {
return &Client{baseURL: baseURL, apiKey: apiKey}
}
func (c *Client) CreateBookmark(entryURL, entryTitle string) error {
if c.baseURL == "" || c.apiKey == "" {
return fmt.Errorf("linkwarden: missing base URL or API key")
}
apiEndpoint, err := urllib.JoinBaseURLAndPath(c.baseURL, "/api/v1/links")
if err != nil {
return fmt.Errorf(`linkwarden: invalid API endpoint: %v`, err)
}
requestBody, err := json.Marshal(&linkwardenBookmark{
Url: entryURL,
Name: "",
Description: "",
Tags: []string{},
Collection: map[string]interface{}{},
})
if err != nil {
return fmt.Errorf("linkwarden: unable to encode request body: %v", err)
}
request, err := http.NewRequest(http.MethodPost, apiEndpoint, bytes.NewReader(requestBody))
if err != nil {
return fmt.Errorf("linkwarden: unable to create request: %v", err)
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
request.AddCookie(&http.Cookie{Name: "__Secure-next-auth.session-token", Value: c.apiKey})
request.AddCookie(&http.Cookie{Name: "next-auth.session-token", Value: c.apiKey})
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return fmt.Errorf("linkwarden: unable to send request: %v", err)
}
defer response.Body.Close()
if response.StatusCode >= 400 {
return fmt.Errorf("linkwarden: unable to create link: url=%s status=%d", apiEndpoint, response.StatusCode)
}
return nil
}
type linkwardenBookmark struct {
Url string `json:"url"`
Name string `json:"name"`
Description string `json:"description"`
Tags []string `json:"tags"`
Collection map[string]interface{} `json:"collection"`
}
v2-2.2.6/internal/integration/matrixbot/ 0000775 0000000 0000000 00000000000 14756465373 0020244 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/integration/matrixbot/client.go 0000664 0000000 0000000 00000014265 14756465373 0022061 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package matrixbot // import "miniflux.app/v2/internal/integration/matrixbot"
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"net/url"
"time"
"miniflux.app/v2/internal/crypto"
"miniflux.app/v2/internal/version"
)
const defaultClientTimeout = 10 * time.Second
type Client struct {
matrixBaseURL string
}
func NewClient(matrixBaseURL string) *Client {
return &Client{matrixBaseURL: matrixBaseURL}
}
// Specs: https://spec.matrix.org/v1.8/client-server-api/#getwell-knownmatrixclient
func (c *Client) DiscoverEndpoints() (*DiscoveryEndpointResponse, error) {
endpointURL, err := url.JoinPath(c.matrixBaseURL, "/.well-known/matrix/client")
if err != nil {
return nil, fmt.Errorf("matrix: unable to join base URL and path: %w", err)
}
request, err := http.NewRequest(http.MethodGet, endpointURL, nil)
if err != nil {
return nil, fmt.Errorf("matrix: unable to create request: %v", err)
}
request.Header.Set("Accept", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return nil, fmt.Errorf("matrix: unable to send request: %v", err)
}
defer response.Body.Close()
if response.StatusCode >= 400 {
return nil, fmt.Errorf("matrix: unexpected response from %s status code is %d", endpointURL, response.StatusCode)
}
var discoveryEndpointResponse DiscoveryEndpointResponse
if err := json.NewDecoder(response.Body).Decode(&discoveryEndpointResponse); err != nil {
return nil, fmt.Errorf("matrix: unable to decode discovery response: %w", err)
}
return &discoveryEndpointResponse, nil
}
// Specs: https://spec.matrix.org/v1.8/client-server-api/#post_matrixclientv3login
func (c *Client) Login(homeServerURL, matrixUsername, matrixPassword string) (*LoginResponse, error) {
endpointURL, err := url.JoinPath(homeServerURL, "/_matrix/client/v3/login")
if err != nil {
return nil, fmt.Errorf("matrix: unable to join base URL and path: %w", err)
}
loginRequest := LoginRequest{
Type: "m.login.password",
Identifier: UserIdentifier{
Type: "m.id.user",
User: matrixUsername,
},
Password: matrixPassword,
}
requestBody, err := json.Marshal(loginRequest)
if err != nil {
return nil, fmt.Errorf("matrix: unable to encode request body: %v", err)
}
request, err := http.NewRequest(http.MethodPost, endpointURL, bytes.NewReader(requestBody))
if err != nil {
return nil, fmt.Errorf("matrix: unable to create request: %v", err)
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("Accept", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return nil, fmt.Errorf("matrix: unable to send request: %v", err)
}
defer response.Body.Close()
if response.StatusCode >= 400 {
return nil, fmt.Errorf("matrix: unexpected response from %s status code is %d", endpointURL, response.StatusCode)
}
var loginResponse LoginResponse
if err := json.NewDecoder(response.Body).Decode(&loginResponse); err != nil {
return nil, fmt.Errorf("matrix: unable to decode login response: %w", err)
}
return &loginResponse, nil
}
// Specs: https://spec.matrix.org/v1.8/client-server-api/#put_matrixclientv3roomsroomidsendeventtypetxnid
func (c *Client) SendFormattedTextMessage(homeServerURL, accessToken, roomID, textMessage, formattedMessage string) (*RoomEventResponse, error) {
txnID := crypto.GenerateRandomStringHex(10)
endpointURL, err := url.JoinPath(homeServerURL, "/_matrix/client/v3/rooms/", roomID, "/send/m.room.message/", txnID)
if err != nil {
return nil, fmt.Errorf("matrix: unable to join base URL and path: %w", err)
}
messageEvent := TextMessageEventRequest{
MsgType: "m.text",
Body: textMessage,
Format: "org.matrix.custom.html",
FormattedBody: formattedMessage,
}
requestBody, err := json.Marshal(messageEvent)
if err != nil {
return nil, fmt.Errorf("matrix: unable to encode request body: %v", err)
}
request, err := http.NewRequest(http.MethodPut, endpointURL, bytes.NewReader(requestBody))
if err != nil {
return nil, fmt.Errorf("matrix: unable to create request: %v", err)
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("Accept", "application/json")
request.Header.Set("User-Agent", "Miniflux/"+version.Version)
request.Header.Set("Authorization", "Bearer "+accessToken)
httpClient := &http.Client{Timeout: defaultClientTimeout}
response, err := httpClient.Do(request)
if err != nil {
return nil, fmt.Errorf("matrix: unable to send request: %v", err)
}
defer response.Body.Close()
if response.StatusCode >= 400 {
return nil, fmt.Errorf("matrix: unexpected response from %s status code is %d", endpointURL, response.StatusCode)
}
var eventResponse RoomEventResponse
if err := json.NewDecoder(response.Body).Decode(&eventResponse); err != nil {
return nil, fmt.Errorf("matrix: unable to decode event response: %w", err)
}
return &eventResponse, nil
}
type HomeServerInformation struct {
BaseURL string `json:"base_url"`
}
type IdentityServerInformation struct {
BaseURL string `json:"base_url"`
}
type DiscoveryEndpointResponse struct {
HomeServerInformation HomeServerInformation `json:"m.homeserver"`
IdentityServerInformation IdentityServerInformation `json:"m.identity_server"`
}
type UserIdentifier struct {
Type string `json:"type"`
User string `json:"user"`
}
type LoginRequest struct {
Type string `json:"type"`
Identifier UserIdentifier `json:"identifier"`
Password string `json:"password"`
}
type LoginResponse struct {
UserID string `json:"user_id"`
AccessToken string `json:"access_token"`
DeviceID string `json:"device_id"`
HomeServer string `json:"home_server"`
}
type TextMessageEventRequest struct {
MsgType string `json:"msgtype"`
Body string `json:"body"`
Format string `json:"format"`
FormattedBody string `json:"formatted_body"`
}
type RoomEventResponse struct {
EventID string `json:"event_id"`
}
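// Editorial usage sketch, not part of the upstream file: the three-step flow exposed by
// this client. The homeserver URL, credentials, room ID, and message are placeholders.
func exampleMatrixFlow() error {
	client := NewClient("https://matrix.example.org")
	// 1. Discover the real homeserver endpoint from the well-known document.
	discovery, err := client.DiscoverEndpoints()
	if err != nil {
		return err
	}
	// 2. Log in with a password to obtain an access token.
	login, err := client.Login(discovery.HomeServerInformation.BaseURL, "miniflux-bot", "secret")
	if err != nil {
		return err
	}
	// 3. Send a formatted message to a room.
	_, err = client.SendFormattedTextMessage(
		discovery.HomeServerInformation.BaseURL,
		login.AccessToken,
		"!room:example.org",
		"Hello from Miniflux",
		"<strong>Hello from Miniflux</strong>",
	)
	return err
}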
v2-2.2.6/internal/integration/matrixbot/matrixbot.go 0000664 0000000 0000000 00000002476 14756465373 0022615 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package matrixbot // import "miniflux.app/v2/internal/integration/matrixbot"
import (
"fmt"
"strings"
"miniflux.app/v2/internal/model"
)
// PushEntries pushes entries to a Matrix chat room using the provided integration settings
func PushEntries(feed *model.Feed, entries model.Entries, matrixBaseURL, matrixUsername, matrixPassword, matrixRoomID string) error {
client := NewClient(matrixBaseURL)
discovery, err := client.DiscoverEndpoints()
if err != nil {
return err
}
loginResponse, err := client.Login(discovery.HomeServerInformation.BaseURL, matrixUsername, matrixPassword)
if err != nil {
return err
}
var textMessages []string
var formattedTextMessages []string
for _, entry := range entries {
textMessages = append(textMessages, fmt.Sprintf(`[%s] %s - %s`, feed.Title, entry.Title, entry.URL))
formattedTextMessages = append(formattedTextMessages, fmt.Sprintf(`<li><strong>%s</strong>: <a href=%q>%s</a></li>`, feed.Title, entry.URL, entry.Title))
}
_, err = client.SendFormattedTextMessage(
discovery.HomeServerInformation.BaseURL,
loginResponse.AccessToken,
matrixRoomID,
strings.Join(textMessages, "\n"),
"<ul>"+strings.Join(formattedTextMessages, "\n")+"</ul>",
)
return err
}
output := RewriteDocumentWithRelativeProxyURL(r, input)
if expected != output {
t.Errorf(`Not expected output: got %s`, output)
}
}
func TestProxyFilterWithPictureSource(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_OPTION", "all")
os.Setenv("PROXY_MEDIA_TYPES", "image")
os.Setenv("PROXY_PRIVATE_KEY", "test")
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
r := mux.NewRouter()
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
input := ``
expected := ``
output := RewriteDocumentWithRelativeProxyURL(r, input)
if expected != output {
t.Errorf(`Not expected output: got %s`, output)
}
}
func TestProxyFilterOnlyNonHTTPWithPictureSource(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_OPTION", "https")
os.Setenv("PROXY_MEDIA_TYPES", "image")
os.Setenv("PROXY_PRIVATE_KEY", "test")
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
r := mux.NewRouter()
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
input := ``
expected := ``
output := RewriteDocumentWithRelativeProxyURL(r, input)
if expected != output {
t.Errorf(`Not expected output: got %s`, output)
}
}
func TestProxyWithImageDataURL(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_OPTION", "all")
os.Setenv("PROXY_MEDIA_TYPES", "image")
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
r := mux.NewRouter()
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
input := ``
expected := ``
output := RewriteDocumentWithRelativeProxyURL(r, input)
if expected != output {
t.Errorf(`Not expected output: got %s`, output)
}
}
func TestProxyWithImageSourceDataURL(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_OPTION", "all")
os.Setenv("PROXY_MEDIA_TYPES", "image")
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
r := mux.NewRouter()
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
input := ``
expected := ``
output := RewriteDocumentWithRelativeProxyURL(r, input)
if expected != output {
t.Errorf(`Not expected output: got %s`, output)
}
}
func TestProxyFilterWithVideo(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_OPTION", "all")
os.Setenv("PROXY_MEDIA_TYPES", "video")
os.Setenv("PROXY_PRIVATE_KEY", "test")
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
r := mux.NewRouter()
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
input := ``
expected := ``
output := RewriteDocumentWithRelativeProxyURL(r, input)
if expected != output {
t.Errorf(`Not expected output: got %s`, output)
}
}
func TestProxyFilterVideoPoster(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_OPTION", "all")
os.Setenv("PROXY_MEDIA_TYPES", "image")
os.Setenv("PROXY_PRIVATE_KEY", "test")
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
r := mux.NewRouter()
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
input := ``
expected := ``
output := RewriteDocumentWithRelativeProxyURL(r, input)
if expected != output {
t.Errorf(`Not expected output: got %s`, output)
}
}
func TestProxyFilterVideoPosterOnce(t *testing.T) {
os.Clearenv()
os.Setenv("PROXY_OPTION", "all")
os.Setenv("PROXY_MEDIA_TYPES", "image,video")
os.Setenv("PROXY_PRIVATE_KEY", "test")
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
r := mux.NewRouter()
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
input := ``
expected := ``
output := RewriteDocumentWithRelativeProxyURL(r, input)
if expected != output {
t.Errorf(`Not expected output: got %s`, output)
}
}
v2-2.2.6/internal/mediaproxy/rewriter.go 0000664 0000000 0000000 00000006650 14756465373 0020272 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package mediaproxy // import "miniflux.app/v2/internal/mediaproxy"
import (
"slices"
"strings"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/reader/sanitizer"
"miniflux.app/v2/internal/urllib"
"github.com/PuerkitoBio/goquery"
"github.com/gorilla/mux"
)
type urlProxyRewriter func(router *mux.Router, url string) string
func RewriteDocumentWithRelativeProxyURL(router *mux.Router, htmlDocument string) string {
return genericProxyRewriter(router, ProxifyRelativeURL, htmlDocument)
}
func RewriteDocumentWithAbsoluteProxyURL(router *mux.Router, htmlDocument string) string {
return genericProxyRewriter(router, ProxifyAbsoluteURL, htmlDocument)
}
func genericProxyRewriter(router *mux.Router, proxifyFunction urlProxyRewriter, htmlDocument string) string {
proxyOption := config.Opts.MediaProxyMode()
if proxyOption == "none" {
return htmlDocument
}
doc, err := goquery.NewDocumentFromReader(strings.NewReader(htmlDocument))
if err != nil {
return htmlDocument
}
for _, mediaType := range config.Opts.MediaProxyResourceTypes() {
switch mediaType {
case "image":
doc.Find("img, picture source").Each(func(i int, img *goquery.Selection) {
if srcAttrValue, ok := img.Attr("src"); ok {
if shouldProxy(srcAttrValue, proxyOption) {
img.SetAttr("src", proxifyFunction(router, srcAttrValue))
}
}
if srcsetAttrValue, ok := img.Attr("srcset"); ok {
proxifySourceSet(img, router, proxifyFunction, proxyOption, srcsetAttrValue)
}
})
if !slices.Contains(config.Opts.MediaProxyResourceTypes(), "video") {
doc.Find("video").Each(func(i int, video *goquery.Selection) {
if posterAttrValue, ok := video.Attr("poster"); ok {
if shouldProxy(posterAttrValue, proxyOption) {
video.SetAttr("poster", proxifyFunction(router, posterAttrValue))
}
}
})
}
case "audio":
doc.Find("audio, audio source").Each(func(i int, audio *goquery.Selection) {
if srcAttrValue, ok := audio.Attr("src"); ok {
if shouldProxy(srcAttrValue, proxyOption) {
audio.SetAttr("src", proxifyFunction(router, srcAttrValue))
}
}
})
case "video":
doc.Find("video, video source").Each(func(i int, video *goquery.Selection) {
if srcAttrValue, ok := video.Attr("src"); ok {
if shouldProxy(srcAttrValue, proxyOption) {
video.SetAttr("src", proxifyFunction(router, srcAttrValue))
}
}
if posterAttrValue, ok := video.Attr("poster"); ok {
if shouldProxy(posterAttrValue, proxyOption) {
video.SetAttr("poster", proxifyFunction(router, posterAttrValue))
}
}
})
}
}
output, err := doc.FindMatcher(goquery.Single("body")).Html()
if err != nil {
return htmlDocument
}
return output
}
func proxifySourceSet(element *goquery.Selection, router *mux.Router, proxifyFunction urlProxyRewriter, proxyOption, srcsetAttrValue string) {
imageCandidates := sanitizer.ParseSrcSetAttribute(srcsetAttrValue)
for _, imageCandidate := range imageCandidates {
if shouldProxy(imageCandidate.ImageURL, proxyOption) {
imageCandidate.ImageURL = proxifyFunction(router, imageCandidate.ImageURL)
}
}
element.SetAttr("srcset", imageCandidates.String())
}
func shouldProxy(attrValue, proxyOption string) bool {
return !strings.HasPrefix(attrValue, "data:") &&
(proxyOption == "all" || !urllib.IsHTTPS(attrValue))
}
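// Editorial sketch, not part of the upstream file: how the rewriter is typically invoked.
// It assumes config.Opts has been parsed and that the router defines a route named "proxy";
// the HTML snippet is a placeholder.
func exampleRewrite(router *mux.Router) string {
	html := `<p><img src="http://example.org/cat.jpg" alt="cat"></p>`
	// With PROXY_OPTION=all and "image" in PROXY_MEDIA_TYPES, the src attribute is
	// rewritten to the relative /proxy/{encodedDigest}/{encodedURL} route.
	return RewriteDocumentWithRelativeProxyURL(router, html)
}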
v2-2.2.6/internal/mediaproxy/url.go 0000664 0000000 0000000 00000003447 14756465373 0017232 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package mediaproxy // import "miniflux.app/v2/internal/mediaproxy"
import (
"crypto/hmac"
"crypto/sha256"
"encoding/base64"
"log/slog"
"net/url"
"github.com/gorilla/mux"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/http/route"
)
func ProxifyRelativeURL(router *mux.Router, mediaURL string) string {
if mediaURL == "" {
return ""
}
if customProxyURL := config.Opts.MediaCustomProxyURL(); customProxyURL != "" {
return proxifyURLWithCustomProxy(mediaURL, customProxyURL)
}
mac := hmac.New(sha256.New, config.Opts.MediaProxyPrivateKey())
mac.Write([]byte(mediaURL))
digest := mac.Sum(nil)
return route.Path(router, "proxy", "encodedDigest", base64.URLEncoding.EncodeToString(digest), "encodedURL", base64.URLEncoding.EncodeToString([]byte(mediaURL)))
}
func ProxifyAbsoluteURL(router *mux.Router, mediaURL string) string {
if mediaURL == "" {
return ""
}
if customProxyURL := config.Opts.MediaCustomProxyURL(); customProxyURL != "" {
return proxifyURLWithCustomProxy(mediaURL, customProxyURL)
}
// Note that the proxified URL is relative to the root URL.
proxifiedUrl := ProxifyRelativeURL(router, mediaURL)
absoluteURL, err := url.JoinPath(config.Opts.RootURL(), proxifiedUrl)
if err != nil {
return mediaURL
}
return absoluteURL
}
func proxifyURLWithCustomProxy(mediaURL, customProxyURL string) string {
if customProxyURL == "" {
return mediaURL
}
absoluteURL, err := url.JoinPath(customProxyURL, base64.URLEncoding.EncodeToString([]byte(mediaURL)))
if err != nil {
slog.Error("Incorrect custom media proxy URL",
slog.String("custom_proxy_url", customProxyURL),
slog.Any("error", err),
)
return mediaURL
}
return absoluteURL
}
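// Editorial sketch, not part of the upstream file: with a custom media proxy configured,
// the rewritten URL is simply the proxy base followed by the base64url-encoded media URL.
// Both URLs below are placeholders.
func exampleCustomProxyURL() string {
	return proxifyURLWithCustomProxy("https://example.org/cat.jpg", "https://images.example.net/resize")
}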
v2-2.2.6/internal/metric/ 0000775 0000000 0000000 00000000000 14756465373 0015173 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/metric/metric.go 0000664 0000000 0000000 00000012755 14756465373 0017017 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package metric // import "miniflux.app/v2/internal/metric"
import (
"log/slog"
"time"
"miniflux.app/v2/internal/storage"
"github.com/prometheus/client_golang/prometheus"
)
// Prometheus Metrics.
var (
BackgroundFeedRefreshDuration = prometheus.NewHistogramVec(
prometheus.HistogramOpts{
Namespace: "miniflux",
Name: "background_feed_refresh_duration",
Help: "Processing time to refresh feeds from the background workers",
Buckets: prometheus.LinearBuckets(1, 2, 15),
},
[]string{"status"},
)
ScraperRequestDuration = prometheus.NewHistogramVec(
prometheus.HistogramOpts{
Namespace: "miniflux",
Name: "scraper_request_duration",
Help: "Web scraper request duration",
Buckets: prometheus.LinearBuckets(1, 2, 25),
},
[]string{"status"},
)
ArchiveEntriesDuration = prometheus.NewHistogramVec(
prometheus.HistogramOpts{
Namespace: "miniflux",
Name: "archive_entries_duration",
Help: "Archive entries duration",
Buckets: prometheus.LinearBuckets(1, 2, 30),
},
[]string{"status"},
)
usersGauge = prometheus.NewGauge(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "users",
Help: "Number of users",
},
)
feedsGauge = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "feeds",
Help: "Number of feeds by status",
},
[]string{"status"},
)
brokenFeedsGauge = prometheus.NewGauge(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "broken_feeds",
Help: "Number of broken feeds",
},
)
entriesGauge = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "entries",
Help: "Number of entries by status",
},
[]string{"status"},
)
dbOpenConnectionsGauge = prometheus.NewGauge(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "db_open_connections",
Help: "The number of established connections both in use and idle",
},
)
dbConnectionsInUseGauge = prometheus.NewGauge(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "db_connections_in_use",
Help: "The number of connections currently in use",
},
)
dbConnectionsIdleGauge = prometheus.NewGauge(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "db_connections_idle",
Help: "The number of idle connections",
},
)
dbConnectionsWaitCountGauge = prometheus.NewGauge(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "db_connections_wait_count",
Help: "The total number of connections waited for",
},
)
dbConnectionsMaxIdleClosedGauge = prometheus.NewGauge(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "db_connections_max_idle_closed",
Help: "The total number of connections closed due to SetMaxIdleConns",
},
)
dbConnectionsMaxIdleTimeClosedGauge = prometheus.NewGauge(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "db_connections_max_idle_time_closed",
Help: "The total number of connections closed due to SetConnMaxIdleTime",
},
)
dbConnectionsMaxLifetimeClosedGauge = prometheus.NewGauge(
prometheus.GaugeOpts{
Namespace: "miniflux",
Name: "db_connections_max_lifetime_closed",
Help: "The total number of connections closed due to SetConnMaxLifetime",
},
)
)
// Collector represents a metric collector.
type Collector struct {
store *storage.Storage
refreshInterval int
}
// NewCollector initializes a new metric collector.
func NewCollector(store *storage.Storage, refreshInterval int) *Collector {
prometheus.MustRegister(BackgroundFeedRefreshDuration)
prometheus.MustRegister(ScraperRequestDuration)
prometheus.MustRegister(ArchiveEntriesDuration)
prometheus.MustRegister(usersGauge)
prometheus.MustRegister(feedsGauge)
prometheus.MustRegister(brokenFeedsGauge)
prometheus.MustRegister(entriesGauge)
prometheus.MustRegister(dbOpenConnectionsGauge)
prometheus.MustRegister(dbConnectionsInUseGauge)
prometheus.MustRegister(dbConnectionsIdleGauge)
prometheus.MustRegister(dbConnectionsWaitCountGauge)
prometheus.MustRegister(dbConnectionsMaxIdleClosedGauge)
prometheus.MustRegister(dbConnectionsMaxIdleTimeClosedGauge)
prometheus.MustRegister(dbConnectionsMaxLifetimeClosedGauge)
return &Collector{store, refreshInterval}
}
// GatherStorageMetrics polls the database to fetch metrics.
func (c *Collector) GatherStorageMetrics() {
for range time.Tick(time.Duration(c.refreshInterval) * time.Second) {
slog.Debug("Collecting metrics from the database")
usersGauge.Set(float64(c.store.CountUsers()))
brokenFeedsGauge.Set(float64(c.store.CountAllFeedsWithErrors()))
feedsCount := c.store.CountAllFeeds()
for status, count := range feedsCount {
feedsGauge.WithLabelValues(status).Set(float64(count))
}
entriesCount := c.store.CountAllEntries()
for status, count := range entriesCount {
entriesGauge.WithLabelValues(status).Set(float64(count))
}
dbStats := c.store.DBStats()
dbOpenConnectionsGauge.Set(float64(dbStats.OpenConnections))
dbConnectionsInUseGauge.Set(float64(dbStats.InUse))
dbConnectionsIdleGauge.Set(float64(dbStats.Idle))
dbConnectionsWaitCountGauge.Set(float64(dbStats.WaitCount))
dbConnectionsMaxIdleClosedGauge.Set(float64(dbStats.MaxIdleClosed))
dbConnectionsMaxIdleTimeClosedGauge.Set(float64(dbStats.MaxIdleTimeClosed))
dbConnectionsMaxLifetimeClosedGauge.Set(float64(dbStats.MaxLifetimeClosed))
}
}
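// Editorial usage sketch, not part of the upstream file: the collector is registered once
// at startup and then polled from a goroutine. The 60-second interval is a placeholder.
func exampleStartCollector(store *storage.Storage) {
	collector := NewCollector(store, 60)
	go collector.GatherStorageMetrics()
}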
v2-2.2.6/internal/model/ 0000775 0000000 0000000 00000000000 14756465373 0015010 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/model/api_key.go 0000664 0000000 0000000 00000001316 14756465373 0016761 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"time"
"miniflux.app/v2/internal/crypto"
)
// APIKey represents an application API key.
type APIKey struct {
ID int64
UserID int64
Token string
Description string
LastUsedAt *time.Time
CreatedAt time.Time
}
// NewAPIKey initializes a new APIKey.
func NewAPIKey(userID int64, description string) *APIKey {
return &APIKey{
UserID: userID,
Token: crypto.GenerateRandomString(32),
Description: description,
}
}
// APIKeys represents a collection of API Key.
type APIKeys []*APIKey
v2-2.2.6/internal/model/app_session.go 0000664 0000000 0000000 00000003705 14756465373 0017667 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"database/sql/driver"
"encoding/json"
"errors"
"fmt"
)
// SessionData represents the data attached to the session.
type SessionData struct {
CSRF string `json:"csrf"`
OAuth2State string `json:"oauth2_state"`
OAuth2CodeVerifier string `json:"oauth2_code_verifier"`
FlashMessage string `json:"flash_message"`
FlashErrorMessage string `json:"flash_error_message"`
Language string `json:"language"`
Theme string `json:"theme"`
PocketRequestToken string `json:"pocket_request_token"`
LastForceRefresh string `json:"last_force_refresh"`
WebAuthnSessionData WebAuthnSession `json:"webauthn_session_data"`
}
func (s *SessionData) String() string {
return fmt.Sprintf(`CSRF=%q, OAuth2State=%q, OAuth2CodeVerifier=%q, FlashMsg=%q, FlashErrMsg=%q, Lang=%q, Theme=%q, PocketTkn=%q, LastForceRefresh=%s, WebAuthnSession=%q`,
s.CSRF,
s.OAuth2State,
s.OAuth2CodeVerifier,
s.FlashMessage,
s.FlashErrorMessage,
s.Language,
s.Theme,
s.PocketRequestToken,
s.LastForceRefresh,
s.WebAuthnSessionData,
)
}
// Value converts the session data to JSON.
func (s *SessionData) Value() (driver.Value, error) {
j, err := json.Marshal(s)
return j, err
}
// Scan converts raw JSON data.
func (s *SessionData) Scan(src interface{}) error {
source, ok := src.([]byte)
if !ok {
return errors.New("session: unable to assert type of src")
}
err := json.Unmarshal(source, s)
if err != nil {
return fmt.Errorf("session: %v", err)
}
return err
}
// Session represents a session in the system.
type Session struct {
ID string
Data *SessionData
}
func (s *Session) String() string {
return fmt.Sprintf(`ID=%q, Data={%v}`, s.ID, s.Data)
}
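// Editorial sketch, not part of the upstream file: SessionData round-trips through the
// database as JSON via the Value/Scan methods above. The field values are placeholders.
func exampleSessionDataRoundTrip() (*SessionData, error) {
	original := &SessionData{CSRF: "csrf-token", Theme: "dark"}
	raw, err := original.Value() // JSON bytes handed to the SQL driver
	if err != nil {
		return nil, err
	}
	restored := &SessionData{}
	if err := restored.Scan(raw); err != nil { // JSON bytes read back from the database
		return nil, err
	}
	return restored, nil
}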
v2-2.2.6/internal/model/categories_sort_options.go 0000664 0000000 0000000 00000000556 14756465373 0022314 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
func CategoriesSortingOptions() map[string]string {
return map[string]string{
"unread_count": "form.prefs.select.unread_count",
"alphabetical": "form.prefs.select.alphabetical",
}
}
v2-2.2.6/internal/model/category.go 0000664 0000000 0000000 00000002040 14756465373 0017150 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import "fmt"
// Category represents a feed category.
type Category struct {
ID int64 `json:"id"`
Title string `json:"title"`
UserID int64 `json:"user_id"`
HideGlobally bool `json:"hide_globally"`
FeedCount *int `json:"feed_count,omitempty"`
TotalUnread *int `json:"total_unread,omitempty"`
}
func (c *Category) String() string {
return fmt.Sprintf("ID=%d, UserID=%d, Title=%s", c.ID, c.UserID, c.Title)
}
// CategoryRequest represents the request to create or update a category.
type CategoryRequest struct {
Title string `json:"title"`
HideGlobally string `json:"hide_globally"`
}
// Patch updates category fields.
func (cr *CategoryRequest) Patch(category *Category) {
category.Title = cr.Title
category.HideGlobally = cr.HideGlobally != ""
}
// Categories represents a list of categories.
type Categories []*Category
v2-2.2.6/internal/model/enclosure.go 0000664 0000000 0000000 00000005757 14756465373 0017354 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"strings"
"github.com/gorilla/mux"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/mediaproxy"
"miniflux.app/v2/internal/urllib"
)
// Enclosure represents an attachment.
type Enclosure struct {
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
EntryID int64 `json:"entry_id"`
URL string `json:"url"`
MimeType string `json:"mime_type"`
Size int64 `json:"size"`
MediaProgression int64 `json:"media_progression"`
}
type EnclosureUpdateRequest struct {
MediaProgression int64 `json:"media_progression"`
}
// Html5MimeType adjusts some MIME types so the HTML5 player can play the enclosure directly.
func (e *Enclosure) Html5MimeType() string {
if e.MimeType == "video/m4v" {
return "video/x-m4v"
}
return e.MimeType
}
func (e *Enclosure) IsAudio() bool {
return strings.HasPrefix(strings.ToLower(e.MimeType), "audio/")
}
func (e *Enclosure) IsVideo() bool {
return strings.HasPrefix(strings.ToLower(e.MimeType), "video/")
}
func (e *Enclosure) IsImage() bool {
mimeType := strings.ToLower(e.MimeType)
mediaURL := strings.ToLower(e.URL)
return strings.HasPrefix(mimeType, "image/") || strings.HasSuffix(mediaURL, ".jpg") || strings.HasSuffix(mediaURL, ".jpeg") || strings.HasSuffix(mediaURL, ".png") || strings.HasSuffix(mediaURL, ".gif")
}
// EnclosureList represents a list of attachments.
type EnclosureList []*Enclosure
// FindMediaPlayerEnclosure returns the first enclosure that can be played by a media player.
func (el EnclosureList) FindMediaPlayerEnclosure() *Enclosure {
for _, enclosure := range el {
if enclosure.URL != "" && strings.Contains(enclosure.MimeType, "audio/") || strings.Contains(enclosure.MimeType, "video/") {
return enclosure
}
}
return nil
}
func (el EnclosureList) ContainsAudioOrVideo() bool {
for _, enclosure := range el {
if strings.Contains(enclosure.MimeType, "audio/") || strings.Contains(enclosure.MimeType, "video/") {
return true
}
}
return false
}
func (el EnclosureList) ProxifyEnclosureURL(router *mux.Router) {
proxyOption := config.Opts.MediaProxyMode()
if proxyOption != "none" {
for i := range el {
if urllib.IsHTTPS(el[i].URL) {
for _, mediaType := range config.Opts.MediaProxyResourceTypes() {
if strings.HasPrefix(el[i].MimeType, mediaType+"/") {
el[i].URL = mediaproxy.ProxifyAbsoluteURL(router, el[i].URL)
break
}
}
}
}
}
}
func (e *Enclosure) ProxifyEnclosureURL(router *mux.Router) {
proxyOption := config.Opts.MediaProxyMode()
if proxyOption == "all" || proxyOption != "none" && !urllib.IsHTTPS(e.URL) {
for _, mediaType := range config.Opts.MediaProxyResourceTypes() {
if strings.HasPrefix(e.MimeType, mediaType+"/") {
e.URL = mediaproxy.ProxifyAbsoluteURL(router, e.URL)
break
}
}
}
}
v2-2.2.6/internal/model/enclosure_test.go 0000664 0000000 0000000 00000002257 14756465373 0020403 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model
import (
"testing"
)
func TestEnclosure_Html5MimeTypeGivesOriginalMimeType(t *testing.T) {
enclosure := Enclosure{MimeType: "thing/thisMimeTypeIsNotExpectedToBeReplaced"}
if enclosure.Html5MimeType() != enclosure.MimeType {
t.Fatalf(
"HTML5 MimeType must provide original MimeType if not explicitly Replaced. Got %s ,expected '%s' ",
enclosure.Html5MimeType(),
enclosure.MimeType,
)
}
}
func TestEnclosure_Html5MimeTypeReplaceStandardM4vByAppleSpecificMimeType(t *testing.T) {
enclosure := Enclosure{MimeType: "video/m4v"}
if enclosure.Html5MimeType() != "video/x-m4v" {
// Solution from this stackoverflow discussion:
// https://stackoverflow.com/questions/15277147/m4v-mimetype-video-mp4-or-video-m4v/66945470#66945470
// tested at the time of this commit (06/2023) on latest Firefox & Vivaldi on this feed
// https://www.florenceporcel.com/podcast/lfhdu.xml
t.Fatalf(
"HTML5 MimeType must be replaced by 'video/x-m4v' when originally video/m4v to ensure playbacks in brownser. Got '%s'",
enclosure.Html5MimeType(),
)
}
}
v2-2.2.6/internal/model/entry.go 0000664 0000000 0000000 00000005331 14756465373 0016502 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"time"
)
// Entry statuses and default sorting order.
const (
EntryStatusUnread = "unread"
EntryStatusRead = "read"
EntryStatusRemoved = "removed"
DefaultSortingOrder = "published_at"
DefaultSortingDirection = "asc"
)
// Entry represents a feed item in the system.
type Entry struct {
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
FeedID int64 `json:"feed_id"`
Status string `json:"status"`
Hash string `json:"hash"`
Title string `json:"title"`
URL string `json:"url"`
CommentsURL string `json:"comments_url"`
Date time.Time `json:"published_at"`
CreatedAt time.Time `json:"created_at"`
ChangedAt time.Time `json:"changed_at"`
Content string `json:"content"`
Author string `json:"author"`
ShareCode string `json:"share_code"`
Starred bool `json:"starred"`
ReadingTime int `json:"reading_time"`
Enclosures EnclosureList `json:"enclosures"`
Feed *Feed `json:"feed,omitempty"`
Tags []string `json:"tags"`
}
func NewEntry() *Entry {
return &Entry{
Enclosures: make(EnclosureList, 0),
Tags: make([]string, 0),
Feed: &Feed{
Category: &Category{},
Icon: &FeedIcon{},
},
}
}
// ShouldMarkAsReadOnView returns whether the entry should be marked as read when viewed, considering all user settings and the entry state.
func (e *Entry) ShouldMarkAsReadOnView(user *User) bool {
// Entries that are already read or removed are never marked as read again.
if e.Status != EntryStatusUnread {
return false
}
// When an audio/video enclosure is present and the user opted in, marking as read happens at media player completion time, so it is not done on view.
if user.MarkReadOnMediaPlayerCompletion && e.Enclosures.ContainsAudioOrVideo() {
return false
}
// The user wants to mark as read on view
return user.MarkReadOnView
}
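// Illustrative sketch only, not part of the upstream code: demonstrates how the
// rules above combine. The user preferences and the enclosure are hypothetical.
func exampleShouldMarkAsReadOnView() bool {
	user := &User{MarkReadOnView: true, MarkReadOnMediaPlayerCompletion: true}
	entry := NewEntry()
	entry.Status = EntryStatusUnread
	entry.Enclosures = append(entry.Enclosures, &Enclosure{MimeType: "audio/mpeg"})
	// Returns false: the audio enclosure defers marking-as-read to media player completion.
	return entry.ShouldMarkAsReadOnView(user)
}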
// Entries represents a list of entries.
type Entries []*Entry
// EntriesStatusUpdateRequest represents a request to change entries status.
type EntriesStatusUpdateRequest struct {
EntryIDs []int64 `json:"entry_ids"`
Status string `json:"status"`
}
// EntryUpdateRequest represents a request to update an entry.
type EntryUpdateRequest struct {
Title *string `json:"title"`
Content *string `json:"content"`
}
func (e *EntryUpdateRequest) Patch(entry *Entry) {
if e.Title != nil && *e.Title != "" {
entry.Title = *e.Title
}
if e.Content != nil && *e.Content != "" {
entry.Content = *e.Content
}
}
v2-2.2.6/internal/model/feed.go 0000664 0000000 0000000 00000023174 14756465373 0016251 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"fmt"
"io"
"math"
"time"
"miniflux.app/v2/internal/config"
)
// List of supported schedulers.
const (
SchedulerRoundRobin = "round_robin"
SchedulerEntryFrequency = "entry_frequency"
// Default settings for the feed query builder
DefaultFeedSorting = "parsing_error_count"
DefaultFeedSortingDirection = "desc"
)
// Feed represents a feed in the application.
type Feed struct {
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
FeedURL string `json:"feed_url"`
SiteURL string `json:"site_url"`
Title string `json:"title"`
Description string `json:"description"`
CheckedAt time.Time `json:"checked_at"`
NextCheckAt time.Time `json:"next_check_at"`
EtagHeader string `json:"etag_header"`
LastModifiedHeader string `json:"last_modified_header"`
ParsingErrorMsg string `json:"parsing_error_message"`
ParsingErrorCount int `json:"parsing_error_count"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
Crawler bool `json:"crawler"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
UrlRewriteRules string `json:"urlrewrite_rules"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
Disabled bool `json:"disabled"`
NoMediaPlayer bool `json:"no_media_player"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"`
HideGlobally bool `json:"hide_globally"`
DisableHTTP2 bool `json:"disable_http2"`
AppriseServiceURLs string `json:"apprise_service_urls"`
WebhookURL string `json:"webhook_url"`
NtfyEnabled bool `json:"ntfy_enabled"`
NtfyPriority int `json:"ntfy_priority"`
PushoverEnabled bool `json:"pushover_enabled,omitempty"`
PushoverPriority int `json:"pushover_priority,omitempty"`
// Non-persisted attributes
Category *Category `json:"category,omitempty"`
Icon *FeedIcon `json:"icon"`
Entries Entries `json:"entries,omitempty"`
// Internal attributes (not exposed in the API and not persisted in the database)
TTL int `json:"-"`
IconURL string `json:"-"`
UnreadCount int `json:"-"`
ReadCount int `json:"-"`
NumberOfVisibleEntries int `json:"-"`
}
type FeedCounters struct {
ReadCounters map[int64]int `json:"reads"`
UnreadCounters map[int64]int `json:"unreads"`
}
func (f *Feed) String() string {
return fmt.Sprintf("ID=%d, UserID=%d, FeedURL=%s, SiteURL=%s, Title=%s, Category={%s}",
f.ID,
f.UserID,
f.FeedURL,
f.SiteURL,
f.Title,
f.Category,
)
}
// WithCategoryID initializes the category attribute of the feed.
func (f *Feed) WithCategoryID(categoryID int64) {
f.Category = &Category{ID: categoryID}
}
// WithTranslatedErrorMessage adds a new error message and increments the error counter.
func (f *Feed) WithTranslatedErrorMessage(message string) {
f.ParsingErrorCount++
f.ParsingErrorMsg = message
}
// ResetErrorCounter removes all previous errors.
func (f *Feed) ResetErrorCounter() {
f.ParsingErrorCount = 0
f.ParsingErrorMsg = ""
}
// CheckedNow sets the last check timestamp and falls back to the feed URL as site URL when the feed is refreshed.
func (f *Feed) CheckedNow() {
f.CheckedAt = time.Now()
if f.SiteURL == "" {
f.SiteURL = f.FeedURL
}
}
// ScheduleNextCheck sets "next_check_at" for the feed based on the scheduler selected in the configuration.
func (f *Feed) ScheduleNextCheck(weeklyCount int, refreshDelayInMinutes int) {
f.TTL = refreshDelayInMinutes
// Default to the round-robin scheduler's minimum interval from the global configuration.
intervalMinutes := config.Opts.SchedulerRoundRobinMinInterval()
if config.Opts.PollingScheduler() == SchedulerEntryFrequency {
if weeklyCount <= 0 {
intervalMinutes = config.Opts.SchedulerEntryFrequencyMaxInterval()
} else {
intervalMinutes = int(math.Round(float64(7*24*60) / float64(weeklyCount*config.Opts.SchedulerEntryFrequencyFactor())))
intervalMinutes = min(intervalMinutes, config.Opts.SchedulerEntryFrequencyMaxInterval())
intervalMinutes = max(intervalMinutes, config.Opts.SchedulerEntryFrequencyMinInterval())
}
}
// If the feed has a TTL or a Retry-After defined, we use it to make sure we don't check it too often.
if refreshDelayInMinutes > 0 && refreshDelayInMinutes > intervalMinutes {
intervalMinutes = refreshDelayInMinutes
}
f.NextCheckAt = time.Now().Add(time.Minute * time.Duration(intervalMinutes))
}
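// Worked example (illustrative, using hypothetical configuration values): with
// the entry_frequency scheduler, a frequency factor of 1, a minimum interval of
// 60 minutes and a maximum of 1440 minutes, a feed that published 14 entries in
// the last week is rescheduled in round(7*24*60 / (14*1)) = 720 minutes. A TTL
// or Retry-After delay larger than the computed interval takes precedence.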
// FeedCreationRequest represents the request to create a feed.
type FeedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"`
NoMediaPlayer bool `json:"no_media_player"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
HideGlobally bool `json:"hide_globally"`
UrlRewriteRules string `json:"urlrewrite_rules"`
DisableHTTP2 bool `json:"disable_http2"`
}
type FeedCreationRequestFromSubscriptionDiscovery struct {
Content io.ReadSeeker
ETag string
LastModified string
FeedCreationRequest
}
// FeedModificationRequest represents the request to update a feed.
type FeedModificationRequest struct {
FeedURL *string `json:"feed_url"`
SiteURL *string `json:"site_url"`
Title *string `json:"title"`
Description *string `json:"description"`
ScraperRules *string `json:"scraper_rules"`
RewriteRules *string `json:"rewrite_rules"`
BlocklistRules *string `json:"blocklist_rules"`
KeeplistRules *string `json:"keeplist_rules"`
UrlRewriteRules *string `json:"urlrewrite_rules"`
Crawler *bool `json:"crawler"`
UserAgent *string `json:"user_agent"`
Cookie *string `json:"cookie"`
Username *string `json:"username"`
Password *string `json:"password"`
CategoryID *int64 `json:"category_id"`
Disabled *bool `json:"disabled"`
NoMediaPlayer *bool `json:"no_media_player"`
IgnoreHTTPCache *bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates *bool `json:"allow_self_signed_certificates"`
FetchViaProxy *bool `json:"fetch_via_proxy"`
HideGlobally *bool `json:"hide_globally"`
DisableHTTP2 *bool `json:"disable_http2"`
}
// Patch updates a feed with modified values.
func (f *FeedModificationRequest) Patch(feed *Feed) {
if f.FeedURL != nil && *f.FeedURL != "" {
feed.FeedURL = *f.FeedURL
}
if f.SiteURL != nil && *f.SiteURL != "" {
feed.SiteURL = *f.SiteURL
}
if f.Title != nil && *f.Title != "" {
feed.Title = *f.Title
}
if f.Description != nil && *f.Description != "" {
feed.Description = *f.Description
}
if f.ScraperRules != nil {
feed.ScraperRules = *f.ScraperRules
}
if f.RewriteRules != nil {
feed.RewriteRules = *f.RewriteRules
}
if f.KeeplistRules != nil {
feed.KeeplistRules = *f.KeeplistRules
}
if f.UrlRewriteRules != nil {
feed.UrlRewriteRules = *f.UrlRewriteRules
}
if f.BlocklistRules != nil {
feed.BlocklistRules = *f.BlocklistRules
}
if f.Crawler != nil {
feed.Crawler = *f.Crawler
}
if f.UserAgent != nil {
feed.UserAgent = *f.UserAgent
}
if f.Cookie != nil {
feed.Cookie = *f.Cookie
}
if f.Username != nil {
feed.Username = *f.Username
}
if f.Password != nil {
feed.Password = *f.Password
}
if f.CategoryID != nil && *f.CategoryID > 0 {
feed.Category.ID = *f.CategoryID
}
if f.Disabled != nil {
feed.Disabled = *f.Disabled
}
if f.NoMediaPlayer != nil {
feed.NoMediaPlayer = *f.NoMediaPlayer
}
if f.IgnoreHTTPCache != nil {
feed.IgnoreHTTPCache = *f.IgnoreHTTPCache
}
if f.AllowSelfSignedCertificates != nil {
feed.AllowSelfSignedCertificates = *f.AllowSelfSignedCertificates
}
if f.FetchViaProxy != nil {
feed.FetchViaProxy = *f.FetchViaProxy
}
if f.HideGlobally != nil {
feed.HideGlobally = *f.HideGlobally
}
if f.DisableHTTP2 != nil {
feed.DisableHTTP2 = *f.DisableHTTP2
}
}
// Feeds is a list of feeds.
type Feeds []*Feed
v2-2.2.6/internal/model/feed_test.go 0000664 0000000 0000000 00000023742 14756465373 0017311 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"fmt"
"os"
"testing"
"time"
"miniflux.app/v2/internal/config"
)
const (
largeWeeklyCount = 10080
noRefreshDelay = 0
)
func TestFeedCategorySetter(t *testing.T) {
feed := &Feed{}
feed.WithCategoryID(int64(123))
if feed.Category == nil {
t.Fatal(`The category field should not be null`)
}
if feed.Category.ID != int64(123) {
t.Error(`The category ID must be set`)
}
}
func TestFeedErrorCounter(t *testing.T) {
feed := &Feed{}
feed.WithTranslatedErrorMessage("Some Error")
if feed.ParsingErrorMsg != "Some Error" {
t.Error(`The error message must be set`)
}
if feed.ParsingErrorCount != 1 {
t.Error(`The error counter must be set to 1`)
}
feed.ResetErrorCounter()
if feed.ParsingErrorMsg != "" {
t.Error(`The error message must be removed`)
}
if feed.ParsingErrorCount != 0 {
t.Error(`The error counter must be set to 0`)
}
}
func TestFeedCheckedNow(t *testing.T) {
feed := &Feed{}
feed.FeedURL = "https://example.org/feed"
feed.CheckedNow()
if feed.SiteURL != feed.FeedURL {
t.Error(`The site URL must not be empty`)
}
if feed.CheckedAt.IsZero() {
t.Error(`The checked date must be set`)
}
}
func checkTargetInterval(t *testing.T, feed *Feed, targetInterval int, timeBefore time.Time, message string) {
if feed.NextCheckAt.Before(timeBefore.Add(time.Minute * time.Duration(targetInterval))) {
t.Errorf(`The next_check_at should be after timeBefore + %s`, message)
}
if feed.NextCheckAt.After(time.Now().Add(time.Minute * time.Duration(targetInterval))) {
t.Errorf(`The next_check_at should be before now + %s`, message)
}
}
func TestFeedScheduleNextCheckRoundRobinDefault(t *testing.T) {
os.Clearenv()
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
feed.ScheduleNextCheck(0, noRefreshDelay)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
targetInterval := config.Opts.SchedulerRoundRobinMinInterval()
checkTargetInterval(t, feed, targetInterval, timeBefore, "TestFeedScheduleNextCheckRoundRobinDefault")
}
func TestFeedScheduleNextCheckRoundRobinWithRefreshDelayAboveMinInterval(t *testing.T) {
os.Clearenv()
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
feed.ScheduleNextCheck(0, config.Opts.SchedulerRoundRobinMinInterval()+30)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
expectedInterval := config.Opts.SchedulerRoundRobinMinInterval() + 30
checkTargetInterval(t, feed, expectedInterval, timeBefore, "TestFeedScheduleNextCheckRoundRobinWithRefreshDelayAboveMinInterval")
}
func TestFeedScheduleNextCheckRoundRobinWithRefreshDelayBelowMinInterval(t *testing.T) {
os.Clearenv()
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
feed.ScheduleNextCheck(0, config.Opts.SchedulerRoundRobinMinInterval()-30)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
expectedInterval := config.Opts.SchedulerRoundRobinMinInterval()
checkTargetInterval(t, feed, expectedInterval, timeBefore, "TestFeedScheduleNextCheckRoundRobinWithRefreshDelayBelowMinInterval")
}
func TestFeedScheduleNextCheckRoundRobinMinInterval(t *testing.T) {
minInterval := 1
os.Clearenv()
os.Setenv("POLLING_SCHEDULER", "round_robin")
os.Setenv("SCHEDULER_ROUND_ROBIN_MIN_INTERVAL", fmt.Sprintf("%d", minInterval))
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
feed.ScheduleNextCheck(0, noRefreshDelay)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
expectedInterval := minInterval
checkTargetInterval(t, feed, expectedInterval, timeBefore, "TestFeedScheduleNextCheckRoundRobinMinInterval")
}
func TestFeedScheduleNextCheckEntryFrequencyMaxInterval(t *testing.T) {
maxInterval := 5
minInterval := 1
os.Clearenv()
os.Setenv("POLLING_SCHEDULER", "entry_frequency")
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL", fmt.Sprintf("%d", maxInterval))
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL", fmt.Sprintf("%d", minInterval))
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
// Use a very small weekly count to trigger the max interval
weeklyCount := 1
feed.ScheduleNextCheck(weeklyCount, noRefreshDelay)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
targetInterval := maxInterval
checkTargetInterval(t, feed, targetInterval, timeBefore, "entry frequency max interval")
}
func TestFeedScheduleNextCheckEntryFrequencyMaxIntervalZeroWeeklyCount(t *testing.T) {
maxInterval := 5
minInterval := 1
os.Clearenv()
os.Setenv("POLLING_SCHEDULER", "entry_frequency")
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL", fmt.Sprintf("%d", maxInterval))
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL", fmt.Sprintf("%d", minInterval))
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
// Use a zero weekly count to trigger the max interval
weeklyCount := 0
feed.ScheduleNextCheck(weeklyCount, noRefreshDelay)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
targetInterval := maxInterval
checkTargetInterval(t, feed, targetInterval, timeBefore, "entry frequency max interval")
}
func TestFeedScheduleNextCheckEntryFrequencyMinInterval(t *testing.T) {
maxInterval := 500
minInterval := 100
os.Clearenv()
os.Setenv("POLLING_SCHEDULER", "entry_frequency")
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL", fmt.Sprintf("%d", maxInterval))
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL", fmt.Sprintf("%d", minInterval))
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
// Use a very large weekly count to trigger the min interval
weeklyCount := largeWeeklyCount
feed.ScheduleNextCheck(weeklyCount, noRefreshDelay)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
targetInterval := minInterval
checkTargetInterval(t, feed, targetInterval, timeBefore, "entry frequency min interval")
}
func TestFeedScheduleNextCheckEntryFrequencyFactor(t *testing.T) {
factor := 2
os.Clearenv()
os.Setenv("POLLING_SCHEDULER", "entry_frequency")
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_FACTOR", fmt.Sprintf("%d", factor))
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
weeklyCount := 7
feed.ScheduleNextCheck(weeklyCount, noRefreshDelay)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
targetInterval := config.Opts.SchedulerEntryFrequencyMaxInterval() / factor
checkTargetInterval(t, feed, targetInterval, timeBefore, "factor * count")
}
func TestFeedScheduleNextCheckEntryFrequencySmallNewTTL(t *testing.T) {
// If the feed has a TTL defined, we use it to make sure we don't check it too often.
maxInterval := 500
minInterval := 100
os.Clearenv()
os.Setenv("POLLING_SCHEDULER", "entry_frequency")
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL", fmt.Sprintf("%d", maxInterval))
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL", fmt.Sprintf("%d", minInterval))
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
// Use a very large weekly count to trigger the min interval
weeklyCount := largeWeeklyCount
// TTL is smaller than minInterval.
newTTL := minInterval / 2
feed.ScheduleNextCheck(weeklyCount, newTTL)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
targetInterval := minInterval
checkTargetInterval(t, feed, targetInterval, timeBefore, "entry frequency min interval")
if feed.NextCheckAt.Before(timeBefore.Add(time.Minute * time.Duration(newTTL))) {
t.Error(`The next_check_at should be after timeBefore + TTL`)
}
}
func TestFeedScheduleNextCheckEntryFrequencyLargeNewTTL(t *testing.T) {
// If the feed has a TTL defined, we use it to make sure we don't check it too often.
maxInterval := 500
minInterval := 100
os.Clearenv()
os.Setenv("POLLING_SCHEDULER", "entry_frequency")
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL", fmt.Sprintf("%d", maxInterval))
os.Setenv("SCHEDULER_ENTRY_FREQUENCY_MIN_INTERVAL", fmt.Sprintf("%d", minInterval))
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
timeBefore := time.Now()
feed := &Feed{}
// Use a very large weekly count to trigger the min interval
weeklyCount := largeWeeklyCount
// TTL is larger than minInterval.
newTTL := minInterval * 2
feed.ScheduleNextCheck(weeklyCount, newTTL)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
targetInterval := newTTL
checkTargetInterval(t, feed, targetInterval, timeBefore, "TTL")
if feed.NextCheckAt.Before(timeBefore.Add(time.Minute * time.Duration(minInterval))) {
t.Error(`The next_check_at should be after timeBefore + entry frequency min interval`)
}
}
v2-2.2.6/internal/model/home_page.go 0000664 0000000 0000000 00000000716 14756465373 0017267 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
// HomePages returns the list of available home pages.
func HomePages() map[string]string {
return map[string]string{
"unread": "menu.unread",
"starred": "menu.starred",
"history": "menu.history",
"feeds": "menu.feeds",
"categories": "menu.categories",
}
}
v2-2.2.6/internal/model/icon.go 0000664 0000000 0000000 00000001417 14756465373 0016272 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"encoding/base64"
"fmt"
)
// Icon represents a website icon (favicon)
type Icon struct {
ID int64 `json:"id"`
Hash string `json:"hash"`
MimeType string `json:"mime_type"`
Content []byte `json:"-"`
}
// DataURL returns the data URL of the icon.
func (i *Icon) DataURL() string {
return fmt.Sprintf("%s;base64,%s", i.MimeType, base64.StdEncoding.EncodeToString(i.Content))
}
// Icons represents a list of icons.
type Icons []*Icon
// FeedIcon is a junction table between feeds and icons.
type FeedIcon struct {
FeedID int64 `json:"feed_id"`
IconID int64 `json:"icon_id"`
}
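// Illustrative sketch only, not part of the upstream code: builds the data URL
// payload for a tiny hypothetical icon. Callers are expected to embed the
// returned value in a "data:" URI when rendering it.
func exampleIconDataURL() string {
	icon := &Icon{MimeType: "image/png", Content: []byte{0x89, 0x50, 0x4E, 0x47}}
	return icon.DataURL() // "image/png;base64,iVBORw=="
}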
v2-2.2.6/internal/model/integration.go 0000664 0000000 0000000 00000011173 14756465373 0017665 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
// Integration represents user integration settings.
type Integration struct {
UserID int64
BetulaEnabled bool
BetulaURL string
BetulaToken string
PinboardEnabled bool
PinboardToken string
PinboardTags string
PinboardMarkAsUnread bool
InstapaperEnabled bool
InstapaperUsername string
InstapaperPassword string
FeverEnabled bool
FeverUsername string
FeverToken string
GoogleReaderEnabled bool
GoogleReaderUsername string
GoogleReaderPassword string
WallabagEnabled bool
WallabagOnlyURL bool
WallabagURL string
WallabagClientID string
WallabagClientSecret string
WallabagUsername string
WallabagPassword string
NunuxKeeperEnabled bool
NunuxKeeperURL string
NunuxKeeperAPIKey string
NotionEnabled bool
NotionToken string
NotionPageID string
EspialEnabled bool
EspialURL string
EspialAPIKey string
EspialTags string
ReadwiseEnabled bool
ReadwiseAPIKey string
PocketEnabled bool
PocketAccessToken string
PocketConsumerKey string
TelegramBotEnabled bool
TelegramBotToken string
TelegramBotChatID string
TelegramBotTopicID *int64
TelegramBotDisableWebPagePreview bool
TelegramBotDisableNotification bool
TelegramBotDisableButtons bool
LinkAceEnabled bool
LinkAceURL string
LinkAceAPIKey string
LinkAceTags string
LinkAcePrivate bool
LinkAceCheckDisabled bool
LinkdingEnabled bool
LinkdingURL string
LinkdingAPIKey string
LinkdingTags string
LinkdingMarkAsUnread bool
LinkwardenEnabled bool
LinkwardenURL string
LinkwardenAPIKey string
MatrixBotEnabled bool
MatrixBotUser string
MatrixBotPassword string
MatrixBotURL string
MatrixBotChatID string
AppriseEnabled bool
AppriseURL string
AppriseServicesURL string
ReadeckEnabled bool
ReadeckURL string
ReadeckAPIKey string
ReadeckLabels string
ReadeckOnlyURL bool
ShioriEnabled bool
ShioriURL string
ShioriUsername string
ShioriPassword string
ShaarliEnabled bool
ShaarliURL string
ShaarliAPISecret string
WebhookEnabled bool
WebhookURL string
WebhookSecret string
RSSBridgeEnabled bool
RSSBridgeURL string
OmnivoreEnabled bool
OmnivoreAPIKey string
OmnivoreURL string
RaindropEnabled bool
RaindropToken string
RaindropCollectionID string
RaindropTags string
NtfyEnabled bool
NtfyTopic string
NtfyURL string
NtfyAPIToken string
NtfyUsername string
NtfyPassword string
NtfyIconURL string
NtfyInternalLinks bool
CuboxEnabled bool
CuboxAPILink string
DiscordEnabled bool
DiscordWebhookLink string
SlackEnabled bool
SlackWebhookLink string
PushoverEnabled bool
PushoverUser string
PushoverToken string
PushoverDevice string
PushoverPrefix string
}
v2-2.2.6/internal/model/job.go 0000664 0000000 0000000 00000000526 14756465373 0016114 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
// Job represents a payload sent to the processing queue.
type Job struct {
UserID int64
FeedID int64
}
// JobList represents a list of jobs.
type JobList []Job
v2-2.2.6/internal/model/model.go 0000664 0000000 0000000 00000000641 14756465373 0016440 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
type Number interface {
int | int64 | float64
}
func OptionalNumber[T Number](value T) *T {
if value > 0 {
return &value
}
return nil
}
func OptionalString(value string) *string {
if value != "" {
return &value
}
return nil
}
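// Illustrative sketch only, not part of the upstream code: OptionalNumber and
// OptionalString are typically used to build partial update requests, where a
// nil pointer means "leave the field unchanged". The values are hypothetical.
func exampleOptionalHelpers() *FeedModificationRequest {
	return &FeedModificationRequest{
		Title:      OptionalString("New feed title"), // non-empty value: patched
		CategoryID: OptionalNumber(int64(42)),        // positive value: patched
		Crawler:    nil,                              // nil: left untouched by Patch
	}
}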
v2-2.2.6/internal/model/subscription.go 0000664 0000000 0000000 00000001374 14756465373 0020070 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
// SubscriptionDiscoveryRequest represents a request to discover subscriptions.
type SubscriptionDiscoveryRequest struct {
URL string `json:"url"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
FetchViaProxy bool `json:"fetch_via_proxy"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
DisableHTTP2 bool `json:"disable_http2"`
}
v2-2.2.6/internal/model/theme.go 0000664 0000000 0000000 00000002133 14756465373 0016440 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
// Themes returns the list of available themes.
func Themes() map[string]string {
return map[string]string{
"light_serif": "Light - Serif",
"light_sans_serif": "Light - Sans Serif",
"dark_serif": "Dark - Serif",
"dark_sans_serif": "Dark - Sans Serif",
"system_serif": "System - Serif",
"system_sans_serif": "System - Sans Serif",
}
}
// ThemeColor returns the theme color used for the browser address bar and UI.
// https://developer.mozilla.org/en-US/docs/Web/Manifest#theme_color
// https://developers.google.com/web/tools/lighthouse/audits/address-bar
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/meta/name/theme-color
func ThemeColor(theme, colorScheme string) string {
switch theme {
case "dark_serif", "dark_sans_serif":
return "#222"
case "system_serif", "system_sans_serif":
if colorScheme == "dark" {
return "#222"
}
return "#fff"
default:
return "#fff"
}
}
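// Illustrative examples derived from the switch above: ThemeColor("dark_serif", "")
// and ThemeColor("system_sans_serif", "dark") both return "#222", while
// ThemeColor("light_serif", "") and unknown themes return "#fff".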
v2-2.2.6/internal/model/user.go 0000664 0000000 0000000 00000016214 14756465373 0016321 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"time"
"miniflux.app/v2/internal/timezone"
)
// User represents a user in the system.
type User struct {
ID int64 `json:"id"`
Username string `json:"username"`
Password string `json:"-"`
IsAdmin bool `json:"is_admin"`
Theme string `json:"theme"`
Language string `json:"language"`
Timezone string `json:"timezone"`
EntryDirection string `json:"entry_sorting_direction"`
EntryOrder string `json:"entry_sorting_order"`
Stylesheet string `json:"stylesheet"`
CustomJS string `json:"custom_js"`
ExternalFontHosts string `json:"external_font_hosts"`
GoogleID string `json:"google_id"`
OpenIDConnectID string `json:"openid_connect_id"`
EntriesPerPage int `json:"entries_per_page"`
KeyboardShortcuts bool `json:"keyboard_shortcuts"`
ShowReadingTime bool `json:"show_reading_time"`
EntrySwipe bool `json:"entry_swipe"`
GestureNav string `json:"gesture_nav"`
LastLoginAt *time.Time `json:"last_login_at"`
DisplayMode string `json:"display_mode"`
DefaultReadingSpeed int `json:"default_reading_speed"`
CJKReadingSpeed int `json:"cjk_reading_speed"`
DefaultHomePage string `json:"default_home_page"`
CategoriesSortingOrder string `json:"categories_sorting_order"`
MarkReadOnView bool `json:"mark_read_on_view"`
MarkReadOnMediaPlayerCompletion bool `json:"mark_read_on_media_player_completion"`
MediaPlaybackRate float64 `json:"media_playback_rate"`
BlockFilterEntryRules string `json:"block_filter_entry_rules"`
KeepFilterEntryRules string `json:"keep_filter_entry_rules"`
}
// UserCreationRequest represents the request to create a user.
type UserCreationRequest struct {
Username string `json:"username"`
Password string `json:"password"`
IsAdmin bool `json:"is_admin"`
GoogleID string `json:"google_id"`
OpenIDConnectID string `json:"openid_connect_id"`
}
// UserModificationRequest represents the request to update a user.
type UserModificationRequest struct {
Username *string `json:"username"`
Password *string `json:"password"`
Theme *string `json:"theme"`
Language *string `json:"language"`
Timezone *string `json:"timezone"`
EntryDirection *string `json:"entry_sorting_direction"`
EntryOrder *string `json:"entry_sorting_order"`
Stylesheet *string `json:"stylesheet"`
CustomJS *string `json:"custom_js"`
ExternalFontHosts *string `json:"external_font_hosts"`
GoogleID *string `json:"google_id"`
OpenIDConnectID *string `json:"openid_connect_id"`
EntriesPerPage *int `json:"entries_per_page"`
IsAdmin *bool `json:"is_admin"`
KeyboardShortcuts *bool `json:"keyboard_shortcuts"`
ShowReadingTime *bool `json:"show_reading_time"`
EntrySwipe *bool `json:"entry_swipe"`
GestureNav *string `json:"gesture_nav"`
DisplayMode *string `json:"display_mode"`
DefaultReadingSpeed *int `json:"default_reading_speed"`
CJKReadingSpeed *int `json:"cjk_reading_speed"`
DefaultHomePage *string `json:"default_home_page"`
CategoriesSortingOrder *string `json:"categories_sorting_order"`
MarkReadOnView *bool `json:"mark_read_on_view"`
MarkReadOnMediaPlayerCompletion *bool `json:"mark_read_on_media_player_completion"`
MediaPlaybackRate *float64 `json:"media_playback_rate"`
BlockFilterEntryRules *string `json:"block_filter_entry_rules"`
KeepFilterEntryRules *string `json:"keep_filter_entry_rules"`
}
// Patch updates the User object with the modification request.
func (u *UserModificationRequest) Patch(user *User) {
if u.Username != nil {
user.Username = *u.Username
}
if u.Password != nil {
user.Password = *u.Password
}
if u.IsAdmin != nil {
user.IsAdmin = *u.IsAdmin
}
if u.Theme != nil {
user.Theme = *u.Theme
}
if u.Language != nil {
user.Language = *u.Language
}
if u.Timezone != nil {
user.Timezone = *u.Timezone
}
if u.EntryDirection != nil {
user.EntryDirection = *u.EntryDirection
}
if u.EntryOrder != nil {
user.EntryOrder = *u.EntryOrder
}
if u.Stylesheet != nil {
user.Stylesheet = *u.Stylesheet
}
if u.CustomJS != nil {
user.CustomJS = *u.CustomJS
}
if u.ExternalFontHosts != nil {
user.ExternalFontHosts = *u.ExternalFontHosts
}
if u.GoogleID != nil {
user.GoogleID = *u.GoogleID
}
if u.OpenIDConnectID != nil {
user.OpenIDConnectID = *u.OpenIDConnectID
}
if u.EntriesPerPage != nil {
user.EntriesPerPage = *u.EntriesPerPage
}
if u.KeyboardShortcuts != nil {
user.KeyboardShortcuts = *u.KeyboardShortcuts
}
if u.ShowReadingTime != nil {
user.ShowReadingTime = *u.ShowReadingTime
}
if u.EntrySwipe != nil {
user.EntrySwipe = *u.EntrySwipe
}
if u.GestureNav != nil {
user.GestureNav = *u.GestureNav
}
if u.DisplayMode != nil {
user.DisplayMode = *u.DisplayMode
}
if u.DefaultReadingSpeed != nil {
user.DefaultReadingSpeed = *u.DefaultReadingSpeed
}
if u.CJKReadingSpeed != nil {
user.CJKReadingSpeed = *u.CJKReadingSpeed
}
if u.DefaultHomePage != nil {
user.DefaultHomePage = *u.DefaultHomePage
}
if u.CategoriesSortingOrder != nil {
user.CategoriesSortingOrder = *u.CategoriesSortingOrder
}
if u.MarkReadOnView != nil {
user.MarkReadOnView = *u.MarkReadOnView
}
if u.MarkReadOnMediaPlayerCompletion != nil {
user.MarkReadOnMediaPlayerCompletion = *u.MarkReadOnMediaPlayerCompletion
}
if u.MediaPlaybackRate != nil {
user.MediaPlaybackRate = *u.MediaPlaybackRate
}
if u.BlockFilterEntryRules != nil {
user.BlockFilterEntryRules = *u.BlockFilterEntryRules
}
if u.KeepFilterEntryRules != nil {
user.KeepFilterEntryRules = *u.KeepFilterEntryRules
}
}
// UseTimezone converts last login date to the given timezone.
func (u *User) UseTimezone(tz string) {
if u.LastLoginAt != nil {
*u.LastLoginAt = timezone.Convert(tz, *u.LastLoginAt)
}
}
// Users represents a list of users.
type Users []*User
// UseTimezone converts last login timestamp of all users to the given timezone.
func (u Users) UseTimezone(tz string) {
for _, user := range u {
user.UseTimezone(tz)
}
}
v2-2.2.6/internal/model/user_session.go 0000664 0000000 0000000 00000001741 14756465373 0020063 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"fmt"
"time"
"miniflux.app/v2/internal/timezone"
)
// UserSession represents a user session in the system.
type UserSession struct {
ID int64
UserID int64
Token string
CreatedAt time.Time
UserAgent string
IP string
}
func (u *UserSession) String() string {
return fmt.Sprintf(`ID=%d, UserID=%d, IP=%q, Token=%q`, u.ID, u.UserID, u.IP, u.Token)
}
// UseTimezone converts creation date to the given timezone.
func (u *UserSession) UseTimezone(tz string) {
u.CreatedAt = timezone.Convert(tz, u.CreatedAt)
}
// UserSessions represents a list of sessions.
type UserSessions []*UserSession
// UseTimezone converts creation date of all sessions to the given timezone.
func (u UserSessions) UseTimezone(tz string) {
for _, session := range u {
session.UseTimezone(tz)
}
}
v2-2.2.6/internal/model/webauthn.go 0000664 0000000 0000000 00000002144 14756465373 0017155 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package model // import "miniflux.app/v2/internal/model"
import (
"database/sql/driver"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"time"
"github.com/go-webauthn/webauthn/webauthn"
)
// WebAuthnSession wraps webauthn.SessionData to handle marshalling/unmarshalling of session data to and from the database.
type WebAuthnSession struct {
*webauthn.SessionData
}
func (s WebAuthnSession) Value() (driver.Value, error) {
return json.Marshal(s)
}
func (s *WebAuthnSession) Scan(value interface{}) error {
b, ok := value.([]byte)
if !ok {
return errors.New("type assertion to []byte failed")
}
return json.Unmarshal(b, &s)
}
func (s WebAuthnSession) String() string {
if s.SessionData == nil {
return "{}"
}
return fmt.Sprintf("{Challenge: %s, UserID: %x}", s.SessionData.Challenge, s.SessionData.UserID)
}
type WebAuthnCredential struct {
Credential webauthn.Credential
Name string
AddedOn *time.Time
LastSeenOn *time.Time
Handle []byte
}
func (s WebAuthnCredential) HandleEncoded() string {
return hex.EncodeToString(s.Handle)
}
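// Illustrative sketch only, not part of the upstream code: WebAuthnSession
// round-trips through a single database column via its Value and Scan methods.
// The challenge value below is hypothetical.
func exampleWebAuthnSessionRoundTrip() (WebAuthnSession, error) {
	original := WebAuthnSession{SessionData: &webauthn.SessionData{Challenge: "example-challenge"}}
	raw, err := original.Value() // JSON-encoded bytes suitable for a TEXT or JSONB column
	if err != nil {
		return WebAuthnSession{}, err
	}
	var restored WebAuthnSession
	err = restored.Scan(raw)
	return restored, err
}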
v2-2.2.6/internal/oauth2/ 0000775 0000000 0000000 00000000000 14756465373 0015112 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/oauth2/authorization.go 0000664 0000000 0000000 00000002103 14756465373 0020335 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package oauth2 // import "miniflux.app/v2/internal/oauth2"
import (
"crypto/sha256"
"encoding/base64"
"golang.org/x/oauth2"
"miniflux.app/v2/internal/crypto"
)
type Authorization struct {
url string
state string
codeVerifier string
}
func (u *Authorization) RedirectURL() string {
return u.url
}
func (u *Authorization) State() string {
return u.state
}
func (u *Authorization) CodeVerifier() string {
return u.codeVerifier
}
func GenerateAuthorization(config *oauth2.Config) *Authorization {
codeVerifier := crypto.GenerateRandomStringHex(32)
sum := sha256.Sum256([]byte(codeVerifier))
state := crypto.GenerateRandomStringHex(24)
authUrl := config.AuthCodeURL(
state,
oauth2.SetAuthURLParam("code_challenge_method", "S256"),
oauth2.SetAuthURLParam("code_challenge", base64.RawURLEncoding.EncodeToString(sum[:])),
)
return &Authorization{
url: authUrl,
state: state,
codeVerifier: codeVerifier,
}
}
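// Illustrative sketch only, not part of the upstream handlers: shows the
// intended PKCE flow around GenerateAuthorization. The caller is expected to
// persist the returned state and code verifier in the user session, redirect
// the browser to the returned URL, and later validate the "state" parameter
// and pass the stored verifier back when exchanging the authorization code.
func exampleBeginAuthorizationFlow(conf *oauth2.Config) (redirectURL, state, codeVerifier string) {
	auth := GenerateAuthorization(conf)
	return auth.RedirectURL(), auth.State(), auth.CodeVerifier()
}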
v2-2.2.6/internal/oauth2/google.go 0000664 0000000 0000000 00000004347 14756465373 0016725 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package oauth2 // import "miniflux.app/v2/internal/oauth2"
import (
"context"
"encoding/json"
"fmt"
"miniflux.app/v2/internal/model"
"golang.org/x/oauth2"
)
type googleProfile struct {
Sub string `json:"sub"`
Email string `json:"email"`
}
type googleProvider struct {
clientID string
clientSecret string
redirectURL string
}
func NewGoogleProvider(clientID, clientSecret, redirectURL string) *googleProvider {
return &googleProvider{clientID: clientID, clientSecret: clientSecret, redirectURL: redirectURL}
}
func (g *googleProvider) GetConfig() *oauth2.Config {
return &oauth2.Config{
RedirectURL: g.redirectURL,
ClientID: g.clientID,
ClientSecret: g.clientSecret,
Scopes: []string{"email"},
Endpoint: oauth2.Endpoint{
AuthURL: "https://accounts.google.com/o/oauth2/auth",
TokenURL: "https://accounts.google.com/o/oauth2/token",
},
}
}
func (g *googleProvider) GetUserExtraKey() string {
return "google_id"
}
func (g *googleProvider) GetProfile(ctx context.Context, code, codeVerifier string) (*Profile, error) {
conf := g.GetConfig()
token, err := conf.Exchange(ctx, code, oauth2.SetAuthURLParam("code_verifier", codeVerifier))
if err != nil {
return nil, fmt.Errorf("google: failed to exchange token: %w", err)
}
client := conf.Client(ctx, token)
resp, err := client.Get("https://www.googleapis.com/oauth2/v3/userinfo")
if err != nil {
return nil, fmt.Errorf("google: failed to get user info: %w", err)
}
defer resp.Body.Close()
var user googleProfile
decoder := json.NewDecoder(resp.Body)
if err := decoder.Decode(&user); err != nil {
return nil, fmt.Errorf("google: unable to unserialize Google profile: %w", err)
}
profile := &Profile{Key: g.GetUserExtraKey(), ID: user.Sub, Username: user.Email}
return profile, nil
}
func (g *googleProvider) PopulateUserCreationWithProfileID(user *model.UserCreationRequest, profile *Profile) {
user.GoogleID = profile.ID
}
func (g *googleProvider) PopulateUserWithProfileID(user *model.User, profile *Profile) {
user.GoogleID = profile.ID
}
func (g *googleProvider) UnsetUserProfileID(user *model.User) {
user.GoogleID = ""
}
v2-2.2.6/internal/oauth2/manager.go 0000664 0000000 0000000 00000002175 14756465373 0017060 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package oauth2 // import "miniflux.app/v2/internal/oauth2"
import (
"context"
"errors"
"log/slog"
)
type Manager struct {
providers map[string]Provider
}
func (m *Manager) FindProvider(name string) (Provider, error) {
if provider, found := m.providers[name]; found {
return provider, nil
}
return nil, errors.New("oauth2 provider not found")
}
func (m *Manager) AddProvider(name string, provider Provider) {
m.providers[name] = provider
}
func NewManager(ctx context.Context, clientID, clientSecret, redirectURL, oidcDiscoveryEndpoint string) *Manager {
m := &Manager{providers: make(map[string]Provider)}
m.AddProvider("google", NewGoogleProvider(clientID, clientSecret, redirectURL))
if oidcDiscoveryEndpoint != "" {
if genericOidcProvider, err := NewOidcProvider(ctx, clientID, clientSecret, redirectURL, oidcDiscoveryEndpoint); err != nil {
slog.Error("Failed to initialize OIDC provider",
slog.Any("error", err),
)
} else {
m.AddProvider("oidc", genericOidcProvider)
}
}
return m
}
v2-2.2.6/internal/oauth2/oidc.go 0000664 0000000 0000000 00000005455 14756465373 0016370 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package oauth2 // import "miniflux.app/v2/internal/oauth2"
import (
"context"
"errors"
"fmt"
"miniflux.app/v2/internal/model"
"github.com/coreos/go-oidc/v3/oidc"
"golang.org/x/oauth2"
)
var (
ErrEmptyUsername = errors.New("oidc: username is empty")
)
type oidcProvider struct {
clientID string
clientSecret string
redirectURL string
provider *oidc.Provider
}
func NewOidcProvider(ctx context.Context, clientID, clientSecret, redirectURL, discoveryEndpoint string) (*oidcProvider, error) {
provider, err := oidc.NewProvider(ctx, discoveryEndpoint)
if err != nil {
return nil, fmt.Errorf(`oidc: failed to initialize provider %q: %w`, discoveryEndpoint, err)
}
return &oidcProvider{
clientID: clientID,
clientSecret: clientSecret,
redirectURL: redirectURL,
provider: provider,
}, nil
}
func (o *oidcProvider) GetUserExtraKey() string {
return "openid_connect_id"
}
func (o *oidcProvider) GetConfig() *oauth2.Config {
return &oauth2.Config{
RedirectURL: o.redirectURL,
ClientID: o.clientID,
ClientSecret: o.clientSecret,
Scopes: []string{oidc.ScopeOpenID, "profile", "email"},
Endpoint: o.provider.Endpoint(),
}
}
func (o *oidcProvider) GetProfile(ctx context.Context, code, codeVerifier string) (*Profile, error) {
conf := o.GetConfig()
token, err := conf.Exchange(ctx, code, oauth2.SetAuthURLParam("code_verifier", codeVerifier))
if err != nil {
return nil, fmt.Errorf(`oidc: failed to exchange token: %w`, err)
}
userInfo, err := o.provider.UserInfo(ctx, oauth2.StaticTokenSource(token))
if err != nil {
return nil, fmt.Errorf(`oidc: failed to get user info: %w`, err)
}
profile := &Profile{
Key: o.GetUserExtraKey(),
ID: userInfo.Subject,
}
var userClaims userClaims
if err := userInfo.Claims(&userClaims); err != nil {
return nil, fmt.Errorf(`oidc: failed to parse user claims: %w`, err)
}
for _, value := range []string{userClaims.Email, userClaims.PreferredUsername, userClaims.Name, userClaims.Profile} {
if value != "" {
profile.Username = value
break
}
}
if profile.Username == "" {
return nil, ErrEmptyUsername
}
return profile, nil
}
func (o *oidcProvider) PopulateUserCreationWithProfileID(user *model.UserCreationRequest, profile *Profile) {
user.OpenIDConnectID = profile.ID
}
func (o *oidcProvider) PopulateUserWithProfileID(user *model.User, profile *Profile) {
user.OpenIDConnectID = profile.ID
}
func (o *oidcProvider) UnsetUserProfileID(user *model.User) {
user.OpenIDConnectID = ""
}
type userClaims struct {
Email string `json:"email"`
Profile string `json:"profile"`
Name string `json:"name"`
PreferredUsername string `json:"preferred_username"`
}
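// Illustrative note: given the claim precedence implemented in GetProfile above,
// a hypothetical token exposing both claims, e.g.
// {"email": "jane@example.org", "preferred_username": "jane"},
// yields the username "jane@example.org" because the e-mail claim is checked
// first; preferred_username, name and profile are only used as fallbacks, in that order.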
v2-2.2.6/internal/oauth2/profile.go 0000664 0000000 0000000 00000000654 14756465373 0017106 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package oauth2 // import "miniflux.app/v2/internal/oauth2"
import (
"fmt"
)
// Profile is the OAuth2 user profile.
type Profile struct {
Key string
ID string
Username string
}
func (p Profile) String() string {
return fmt.Sprintf(`Key=%s ; ID=%s ; Username=%s`, p.Key, p.ID, p.Username)
}
v2-2.2.6/internal/oauth2/provider.go 0000664 0000000 0000000 00000001223 14756465373 0017271 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package oauth2 // import "miniflux.app/v2/internal/oauth2"
import (
"context"
"golang.org/x/oauth2"
"miniflux.app/v2/internal/model"
)
// Provider is an interface for OAuth2 providers.
type Provider interface {
GetConfig() *oauth2.Config
GetUserExtraKey() string
GetProfile(ctx context.Context, code, codeVerifier string) (*Profile, error)
PopulateUserCreationWithProfileID(user *model.UserCreationRequest, profile *Profile)
PopulateUserWithProfileID(user *model.User, profile *Profile)
UnsetUserProfileID(user *model.User)
}
v2-2.2.6/internal/reader/ 0000775 0000000 0000000 00000000000 14756465373 0015152 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/reader/atom/ 0000775 0000000 0000000 00000000000 14756465373 0016112 5 ustar 00root root 0000000 0000000 v2-2.2.6/internal/reader/atom/atom_03.go 0000664 0000000 0000000 00000016203 14756465373 0017705 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package atom // import "miniflux.app/v2/internal/reader/atom"
import (
"encoding/base64"
"html"
"strings"
)
// Specs: http://web.archive.org/web/20060811235523/http://www.mnot.net/drafts/draft-nottingham-atom-format-02.html
type Atom03Feed struct {
Version string `xml:"version,attr"`
// The "atom:id" element's content conveys a permanent, globally unique identifier for the feed.
// It MUST NOT change over time, even if the feed is relocated. atom:feed elements MAY contain an atom:id element,
// but MUST NOT contain more than one. The content of this element, when present, MUST be a URI.
ID string `xml:"http://purl.org/atom/ns# id"`
// The "atom:title" element is a Content construct that conveys a human-readable title for the feed.
// atom:feed elements MUST contain exactly one atom:title element.
// If the feed describes a Web resource, its content SHOULD be the same as that resource's title.
Title Atom03Content `xml:"http://purl.org/atom/ns# title"`
// The "atom:link" element is a Link construct that conveys a URI associated with the feed.
// The nature of the relationship as well as the link itself is determined by the element's content.
// atom:feed elements MUST contain at least one atom:link element with a rel attribute value of "alternate".
// atom:feed elements MUST NOT contain more than one atom:link element with a rel attribute value of "alternate" that has the same type attribute value.
// atom:feed elements MAY contain additional atom:link elements beyond those described above.
Links AtomLinks `xml:"http://purl.org/atom/ns# link"`
// The "atom:author" element is a Person construct that indicates the default author of the feed.
// atom:feed elements MUST contain exactly one atom:author element,
// UNLESS all of the atom:feed element's child atom:entry elements contain an atom:author element.
// atom:feed elements MUST NOT contain more than one atom:author element.
Author AtomPerson `xml:"http://purl.org/atom/ns# author"`
// The "atom:entry" element's represents an individual entry that is contained by the feed.
// atom:feed elements MAY contain one or more atom:entry elements.
Entries []Atom03Entry `xml:"http://purl.org/atom/ns# entry"`
}
type Atom03Entry struct {
// The "atom:id" element's content conveys a permanent, globally unique identifier for the entry.
// It MUST NOT change over time, even if other representations of the entry (such as a web representation pointed to by the entry's atom:link element) are relocated.
// If the same entry is syndicated in two atom:feeds published by the same entity, the entry's atom:id MUST be the same in both feeds.
ID string `xml:"id"`
// The "atom:title" element is a Content construct that conveys a human-readable title for the entry.
// atom:entry elements MUST have exactly one "atom:title" element.
// If an entry describes a Web resource, its content SHOULD be the same as that resource's title.
Title Atom03Content `xml:"title"`
// The "atom:modified" element is a Date construct that indicates the time that the entry was last modified.
// atom:entry elements MUST contain an atom:modified element, but MUST NOT contain more than one.
// The content of an atom:modified element MUST have a time zone whose value SHOULD be "UTC".
Modified string `xml:"modified"`
// The "atom:issued" element is a Date construct that indicates the time that the entry was issued.
// atom:entry elements MUST contain an atom:issued element, but MUST NOT contain more than one.
// The content of an atom:issued element MAY omit a time zone.
Issued string `xml:"issued"`
// The "atom:created" element is a Date construct that indicates the time that the entry was created.
// atom:entry elements MAY contain an atom:created element, but MUST NOT contain more than one.
// The content of an atom:created element MUST have a time zone whose value SHOULD be "UTC".
// If atom:created is not present, its content MUST be considered to be the same as that of atom:modified.
Created string `xml:"created"`
// The "atom:link" element is a Link construct that conveys a URI associated with the entry.
// The nature of the relationship as well as the link itself is determined by the element's content.
// atom:entry elements MUST contain at least one atom:link element with a rel attribute value of "alternate".
// atom:entry elements MUST NOT contain more than one atom:link element with a rel attribute value of "alternate" that has the same type attribute value.
// atom:entry elements MAY contain additional atom:link elements beyond those described above.
Links AtomLinks `xml:"link"`
// The "atom:summary" element is a Content construct that conveys a short summary, abstract or excerpt of the entry.
// atom:entry elements MAY contain an atom:summary element, but MUST NOT contain more than one.
Summary Atom03Content `xml:"summary"`
// The "atom:content" element is a Content construct that conveys the content of the entry.
// atom:entry elements MAY contain one or more atom:content elements.
Content Atom03Content `xml:"content"`
// The "atom:author" element is a Person construct that indicates the default author of the entry.
// atom:entry elements MUST contain exactly one atom:author element,
// UNLESS the atom:feed element containing them contains an atom:author element itself.
// atom:entry elements MUST NOT contain more than one atom:author element.
Author AtomPerson `xml:"author"`
}
type Atom03Content struct {
// Content constructs MAY have a "type" attribute, whose value indicates the media type of the content.
// When present, this attribute's value MUST be a registered media type [RFC2045].
// If not present, its value MUST be considered to be "text/plain".
Type string `xml:"type,attr"`
// Content constructs MAY have a "mode" attribute, whose value indicates the method used to encode the content.
// When present, this attribute's value MUST be listed below.
// If not present, its value MUST be considered to be "xml".
//
// "xml": A mode attribute with the value "xml" indicates that the element's content is inline xml (for example, namespace-qualified XHTML).
//
// "escaped": A mode attribute with the value "escaped" indicates that the element's content is an escaped string.
// Processors MUST unescape the element's content before considering it as content of the indicated media type.
//
// "base64": A mode attribute with the value "base64" indicates that the element's content is base64-encoded [RFC2045].
// Processors MUST decode the element's content before considering it as content of the indicated media type.
Mode string `xml:"mode,attr"`
CharData string `xml:",chardata"`
InnerXML string `xml:",innerxml"`
}
func (a *Atom03Content) Content() string {
content := ""
switch {
case a.Mode == "xml":
content = a.InnerXML
case a.Mode == "escaped":
content = a.CharData
case a.Mode == "base64":
b, err := base64.StdEncoding.DecodeString(a.CharData)
if err == nil {
content = string(b)
}
default:
content = a.CharData
}
if a.Type != "text/html" {
content = html.EscapeString(content)
}
return strings.TrimSpace(content)
}
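// Illustrative sketch only, not part of the upstream code: shows how the three
// content modes are interpreted by Content(). The sample payloads are hypothetical.
func exampleAtom03ContentModes() []string {
	escaped := Atom03Content{Type: "text/html", Mode: "escaped", CharData: "<p>Hello</p>"}
	inline := Atom03Content{Type: "text/html", Mode: "xml", InnerXML: "<p>Hello</p>"}
	encoded := Atom03Content{Type: "text/html", Mode: "base64", CharData: "PHA+SGVsbG88L3A+"}
	// All three return "<p>Hello</p>"; non-HTML types would be HTML-escaped instead.
	return []string{escaped.Content(), inline.Content(), encoded.Content()}
}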
v2-2.2.6/internal/reader/atom/atom_03_adapter.go 0000664 0000000 0000000 00000005423 14756465373 0021407 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package atom // import "miniflux.app/v2/internal/reader/atom"
import (
"log/slog"
"time"
"miniflux.app/v2/internal/crypto"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/date"
"miniflux.app/v2/internal/reader/sanitizer"
"miniflux.app/v2/internal/urllib"
)
type Atom03Adapter struct {
atomFeed *Atom03Feed
}
func NewAtom03Adapter(atomFeed *Atom03Feed) *Atom03Adapter {
return &Atom03Adapter{atomFeed}
}
func (a *Atom03Adapter) BuildFeed(baseURL string) *model.Feed {
feed := new(model.Feed)
// Populate the feed URL.
feedURL := a.atomFeed.Links.firstLinkWithRelation("self")
if feedURL != "" {
if absoluteFeedURL, err := urllib.AbsoluteURL(baseURL, feedURL); err == nil {
feed.FeedURL = absoluteFeedURL
}
} else {
feed.FeedURL = baseURL
}
// Populate the site URL.
siteURL := a.atomFeed.Links.OriginalLink()
if siteURL != "" {
if absoluteSiteURL, err := urllib.AbsoluteURL(baseURL, siteURL); err == nil {
feed.SiteURL = absoluteSiteURL
}
} else {
feed.SiteURL = baseURL
}
// Populate the feed title.
feed.Title = a.atomFeed.Title.Content()
if feed.Title == "" {
feed.Title = feed.SiteURL
}
for _, atomEntry := range a.atomFeed.Entries {
entry := model.NewEntry()
// Populate the entry URL.
entry.URL = atomEntry.Links.OriginalLink()
if entry.URL != "" {
if absoluteEntryURL, err := urllib.AbsoluteURL(feed.SiteURL, entry.URL); err == nil {
entry.URL = absoluteEntryURL
}
}
// Populate the entry content.
entry.Content = atomEntry.Content.Content()
if entry.Content == "" {
entry.Content = atomEntry.Summary.Content()
}
// Populate the entry title.
entry.Title = atomEntry.Title.Content()
if entry.Title == "" {
entry.Title = sanitizer.TruncateHTML(entry.Content, 100)
}
if entry.Title == "" {
entry.Title = entry.URL
}
// Populate the entry author.
entry.Author = atomEntry.Author.PersonName()
if entry.Author == "" {
entry.Author = a.atomFeed.Author.PersonName()
}
// Populate the entry date.
for _, value := range []string{atomEntry.Issued, atomEntry.Modified, atomEntry.Created} {
if parsedDate, err := date.Parse(value); err == nil {
entry.Date = parsedDate
break
} else {
slog.Debug("Unable to parse date from Atom 0.3 feed",
slog.String("date", value),
slog.String("id", atomEntry.ID),
slog.Any("error", err),
)
}
}
if entry.Date.IsZero() {
entry.Date = time.Now()
}
// Generate the entry hash.
for _, value := range []string{atomEntry.ID, atomEntry.Links.OriginalLink()} {
if value != "" {
entry.Hash = crypto.Hash(value)
break
}
}
feed.Entries = append(feed.Entries, entry)
}
return feed
}
v2-2.2.6/internal/reader/atom/atom_03_test.go 0000664 0000000 0000000 00000023312 14756465373 0020743 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package atom // import "miniflux.app/v2/internal/reader/atom"
import (
"bytes"
"testing"
"time"
)
func TestParseAtom03(t *testing.T) {
data := `
dive into mark2003-12-13T18:30:02ZMark PilgrimAtom 0.3 snapshottag:diveintomark.org,2003:3.23972003-12-13T08:29:29-04:002003-12-13T18:30:02ZIt's a testHTML content
]]>
`
feed, err := Parse("http://diveintomark.org/atom.xml", bytes.NewReader([]byte(data)), "0.3")
if err != nil {
t.Fatal(err)
}
if feed.Title != "dive into mark" {
t.Errorf("Incorrect title, got: %s", feed.Title)
}
if feed.FeedURL != "http://diveintomark.org/atom.xml" {
t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL)
}
if feed.SiteURL != "http://diveintomark.org/" {
t.Errorf("Incorrect site URL, got: %s", feed.SiteURL)
}
if len(feed.Entries) != 1 {
t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
}
tz := time.FixedZone("Test Case Time", -int((4 * time.Hour).Seconds()))
if !feed.Entries[0].Date.Equal(time.Date(2003, time.December, 13, 8, 29, 29, 0, tz)) {
t.Errorf("Incorrect entry date, got: %v", feed.Entries[0].Date)
}
if feed.Entries[0].Hash != "b70d30334b808f32e66eb19fabb263525cecd18f205720b583e84f7f295cf728" {
t.Errorf("Incorrect entry hash, got: %s", feed.Entries[0].Hash)
}
if feed.Entries[0].URL != "http://diveintomark.org/2003/12/13/atom03" {
t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL)
}
if feed.Entries[0].Title != "Atom 0.3 snapshot" {
t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title)
}
if feed.Entries[0].Content != "
HTML content
" {
t.Errorf("Incorrect entry content, got: %s", feed.Entries[0].Content)
}
if feed.Entries[0].Author != "Mark Pilgrim" {
t.Errorf("Incorrect entry author, got: %s", feed.Entries[0].Author)
}
}
func TestParseAtom03WithoutSiteURL(t *testing.T) {
data := `
2003-12-13T18:30:02ZMark PilgrimAtom 0.3 snapshottag:diveintomark.org,2003:3.2397`
feed, err := Parse("http://diveintomark.org/atom.xml", bytes.NewReader([]byte(data)), "0.3")
if err != nil {
t.Fatal(err)
}
if feed.SiteURL != "http://diveintomark.org/atom.xml" {
t.Errorf("Incorrect title, got: %s", feed.Title)
}
}
func TestParseAtom03WithoutFeedTitle(t *testing.T) {
data := `
2003-12-13T18:30:02ZMark PilgrimAtom 0.3 snapshottag:diveintomark.org,2003:3.2397`
feed, err := Parse("http://diveintomark.org/", bytes.NewReader([]byte(data)), "0.3")
if err != nil {
t.Fatal(err)
}
if feed.Title != "http://diveintomark.org/" {
t.Errorf("Incorrect title, got: %s", feed.Title)
}
}
func TestParseAtom03WithoutEntryTitleButWithLink(t *testing.T) {
	data := `<?xml version="1.0" encoding="utf-8"?>
		<feed version="0.3" xmlns="http://purl.org/atom/ns#">
			<title>dive into mark</title>
			<modified>2003-12-13T18:30:02Z</modified>
			<author><name>Mark Pilgrim</name></author>
			<entry>
				<link rel="alternate" type="text/html" href="http://diveintomark.org/2003/12/13/atom03"/>
				<id>tag:diveintomark.org,2003:3.2397</id>
			</entry>
		</feed>`
feed, err := Parse("http://diveintomark.org/", bytes.NewReader([]byte(data)), "0.3")
if err != nil {
t.Fatal(err)
}
if len(feed.Entries) != 1 {
t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
}
if feed.Entries[0].Title != "http://diveintomark.org/2003/12/13/atom03" {
t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title)
}
}
func TestParseAtom03WithoutEntryTitleButWithSummary(t *testing.T) {
	data := `<?xml version="1.0" encoding="utf-8"?>
		<feed version="0.3" xmlns="http://purl.org/atom/ns#">
			<title>dive into mark</title>
			<modified>2003-12-13T18:30:02Z</modified>
			<author><name>Mark Pilgrim</name></author>
			<entry>
				<id>tag:diveintomark.org,2003:3.2397</id>
				<summary type="text/plain">It's a test</summary>
			</entry>
		</feed>`
feed, err := Parse("http://diveintomark.org/", bytes.NewReader([]byte(data)), "0.3")
if err != nil {
t.Fatal(err)
}
if len(feed.Entries) != 1 {
t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
}
if feed.Entries[0].Title != "It's a test" {
t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title)
}
}
func TestParseAtom03WithoutEntryTitleButWithXMLContent(t *testing.T) {
	data := `<?xml version="1.0" encoding="utf-8"?>
		<feed version="0.3" xmlns="http://purl.org/atom/ns#">
			<title>dive into mark</title>
			<modified>2003-12-13T18:30:02Z</modified>
			<author><name>Mark Pilgrim</name></author>
			<entry>
				<id>tag:diveintomark.org,2003:3.2397</id>
				<content mode="xml" type="text/html"><p>Some text.</p></content>
			</entry>
		</feed>`
feed, err := Parse("http://diveintomark.org/", bytes.NewReader([]byte(data)), "0.3")
if err != nil {
t.Fatal(err)
}
if len(feed.Entries) != 1 {
t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
}
if feed.Entries[0].Title != "Some text." {
t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title)
}
}
func TestParseAtom03WithSummaryOnly(t *testing.T) {
	data := `<?xml version="1.0" encoding="utf-8"?>
		<feed version="0.3" xmlns="http://purl.org/atom/ns#">
			<title>dive into mark</title>
			<modified>2003-12-13T18:30:02Z</modified>
			<author><name>Mark Pilgrim</name></author>
			<entry>
				<title>Atom 0.3 snapshot</title>
				<id>tag:diveintomark.org,2003:3.2397</id>
				<issued>2003-12-13T08:29:29-04:00</issued>
				<modified>2003-12-13T18:30:02Z</modified>
				<summary type="text/plain">It's a test</summary>
			</entry>
		</feed>`
feed, err := Parse("http://diveintomark.org/", bytes.NewReader([]byte(data)), "0.3")
if err != nil {
t.Fatal(err)
}
if len(feed.Entries) != 1 {
t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
}
if feed.Entries[0].Content != "It's a test" {
t.Errorf("Incorrect entry content, got: %s", feed.Entries[0].Content)
}
}
func TestParseAtom03WithXMLContent(t *testing.T) {
	data := `<?xml version="1.0" encoding="utf-8"?>
		<feed version="0.3" xmlns="http://purl.org/atom/ns#">
			<title>dive into mark</title>
			<modified>2003-12-13T18:30:02Z</modified>
			<author><name>Mark Pilgrim</name></author>
			<entry>
				<title>Atom 0.3 snapshot</title>
				<id>tag:diveintomark.org,2003:3.2397</id>
				<issued>2003-12-13T08:29:29-04:00</issued>
				<modified>2003-12-13T18:30:02Z</modified>
				<content mode="xml" type="text/html"><p>Some text.</p></content>
			</entry>
		</feed>`
feed, err := Parse("http://diveintomark.org/", bytes.NewReader([]byte(data)), "0.3")
if err != nil {
t.Fatal(err)
}
if len(feed.Entries) != 1 {
t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
}
	if feed.Entries[0].Content != "<p>Some text.</p>" {
t.Errorf("Incorrect entry content, got: %s", feed.Entries[0].Content)
}
}
func TestParseAtom03WithBase64Content(t *testing.T) {
	data := `<?xml version="1.0" encoding="utf-8"?>
		<feed version="0.3" xmlns="http://purl.org/atom/ns#">
			<title>dive into mark</title>
			<modified>2003-12-13T18:30:02Z</modified>
			<author><name>Mark Pilgrim</name></author>
			<entry>
				<title>Atom 0.3 snapshot</title>
				<id>tag:diveintomark.org,2003:3.2397</id>
				<issued>2003-12-13T08:29:29-04:00</issued>
				<modified>2003-12-13T18:30:02Z</modified>
				<content mode="base64" type="text/html">PHA+U29tZSB0ZXh0LjwvcD4=</content>
			</entry>
		</feed>`
feed, err := Parse("http://diveintomark.org/", bytes.NewReader([]byte(data)), "0.3")
if err != nil {
t.Fatal(err)
}
if len(feed.Entries) != 1 {
t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
}
	if feed.Entries[0].Content != "<p>Some text.</p>" {
t.Errorf("Incorrect entry content, got: %s", feed.Entries[0].Content)
}
}
v2-2.2.6/internal/reader/atom/atom_10.go 0000664 0000000 0000000 00000017026 14756465373 0017707 0 ustar 00root root 0000000 0000000 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package atom // import "miniflux.app/v2/internal/reader/atom"
import (
"encoding/xml"
"html"
"strings"
"miniflux.app/v2/internal/reader/media"
)
// The "atom:feed" element is the document (i.e., top-level) element of
// an Atom Feed Document, acting as a container for metadata and data
// associated with the feed. Its element children consist of metadata
// elements followed by zero or more atom:entry child elements.
//
// Specs:
// https://tools.ietf.org/html/rfc4287
// https://validator.w3.org/feed/docs/atom.html
type Atom10Feed struct {
XMLName xml.Name `xml:"http://www.w3.org/2005/Atom feed"`
// The "atom:id" element conveys a permanent, universally unique
// identifier for an entry or feed.
//
// Its content MUST be an IRI, as defined by [RFC3987]. Note that the
// definition of "IRI" excludes relative references. Though the IRI
// might use a dereferencable scheme, Atom Processors MUST NOT assume it
// can be dereferenced.
//
// atom:feed elements MUST contain exactly one atom:id element.
ID string `xml:"http://www.w3.org/2005/Atom id"`
// The "atom:title" element is a Text construct that conveys a human-
// readable title for an entry or feed.
//
// atom:feed elements MUST contain exactly one atom:title element.
Title Atom10Text `xml:"http://www.w3.org/2005/Atom title"`
// The "atom:author" element is a Person construct that indicates the
// author of the entry or feed.
//
// atom:feed elements MUST contain one or more atom:author elements,
// unless all of the atom:feed element's child atom:entry elements
// contain at least one atom:author element.
Authors AtomPersons `xml:"http://www.w3.org/2005/Atom author"`
// The "atom:icon" element's content is an IRI reference [RFC3987] that
// identifies an image that provides iconic visual identification for a
// feed.
//
// atom:feed elements MUST NOT contain more than one atom:icon element.
Icon string `xml:"http://www.w3.org/2005/Atom icon"`
// The "atom:logo" element's content is an IRI reference [RFC3987] that
// identifies an image that provides visual identification for a feed.
//
// atom:feed elements MUST NOT contain more than one atom:logo element.
Logo string `xml:"http://www.w3.org/2005/Atom logo"`
// atom:feed elements SHOULD contain one atom:link element with a rel
// attribute value of "self". This is the preferred URI for
// retrieving Atom Feed Documents representing this Atom feed.
//
// atom:feed elements MUST NOT contain more than one atom:link
// element with a rel attribute value of "alternate" that has the
// same combination of type and hreflang attribute values.
Links AtomLinks `xml:"http://www.w3.org/2005/Atom link"`
// The "atom:category" element conveys information about a category
// associated with an entry or feed. This specification assigns no
// meaning to the content (if any) of this element.
//
// atom:feed elements MAY contain any number of atom:category
// elements.
Categories AtomCategories `xml:"http://www.w3.org/2005/Atom category"`
Entries []Atom10Entry `xml:"http://www.w3.org/2005/Atom entry"`
}
type Atom10Entry struct {
// The "atom:id" element conveys a permanent, universally unique
// identifier for an entry or feed.
//
// Its content MUST be an IRI, as defined by [RFC3987]. Note that the
// definition of "IRI" excludes relative references. Though the IRI
// might use a dereferencable scheme, Atom Processors MUST NOT assume it
// can be dereferenced.
//
// atom:entry elements MUST contain exactly one atom:id element.
ID string `xml:"http://www.w3.org/2005/Atom id"`
// The "atom:title" element is a Text construct that conveys a human-
// readable title for an entry or feed.
//
// atom:entry elements MUST contain exactly one atom:title element.
Title Atom10Text `xml:"http://www.w3.org/2005/Atom title"`
// The "atom:published" element is a Date construct indicating an
// instant in time associated with an event early in the life cycle of
// the entry.
Published string `xml:"http://www.w3.org/2005/Atom published"`
// The "atom:updated" element is a Date construct indicating the most
// recent instant in time when an entry or feed was modified in a way
// the publisher considers significant. Therefore, not all
// modifications necessarily result in a changed atom:updated value.
//
// atom:entry elements MUST contain exactly one atom:updated element.
Updated string `xml:"http://www.w3.org/2005/Atom updated"`
// atom:entry elements MUST NOT contain more than one atom:link
// element with a rel attribute value of "alternate" that has the
// same combination of type and hreflang attribute values.
Links AtomLinks `xml:"http://www.w3.org/2005/Atom link"`
// atom:entry elements MUST contain an atom:summary element in either
// of the following cases:
// * the atom:entry contains an atom:content that has a "src"
// attribute (and is thus empty).
// * the atom:entry contains content that is encoded in Base64;
// i.e., the "type" attribute of atom:content is a MIME media type
// [MIMEREG], but is not an XML media type [RFC3023], does not
// begin with "text/", and does not end with "/xml" or "+xml".
//
// atom:entry elements MUST NOT contain more than one atom:summary
// element.
Summary Atom10Text `xml:"http://www.w3.org/2005/Atom summary"`
// atom:entry elements MUST NOT contain more than one atom:content
// element.
Content Atom10Text `xml:"http://www.w3.org/2005/Atom content"`
// The "atom:author" element is a Person construct that indicates the
// author of the entry or feed.
//
// atom:entry elements MUST contain one or more atom:author elements
Authors AtomPersons `xml:"http://www.w3.org/2005/Atom author"`
// The "atom:category" element conveys information about a category
// associated with an entry or feed. This specification assigns no
// meaning to the content (if any) of this element.
//
// atom:entry elements MAY contain any number of atom:category
// elements.
Categories AtomCategories `xml:"http://www.w3.org/2005/Atom category"`
media.MediaItemElement
}
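// For orientation, a minimal Atom 1.0 document that maps onto Atom10Feed and
// Atom10Entry looks roughly like this (an illustrative sketch, not a fixture
// from this repository):
//
//	<feed xmlns="http://www.w3.org/2005/Atom">
//	  <id>urn:example:feed</id>
//	  <title>Example Feed</title>
//	  <link rel="self" href="https://example.org/feed.xml"/>
//	  <link rel="alternate" href="https://example.org/"/>
//	  <entry>
//	    <id>urn:example:entry-1</id>
//	    <title>First entry</title>
//	    <updated>2003-12-13T18:30:02Z</updated>
//	    <link rel="alternate" href="https://example.org/first-entry"/>
//	    <content type="html">&lt;p&gt;Hello&lt;/p&gt;</content>
//	  </entry>
//	</feed>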
// A Text construct contains human-readable text, usually in small
// quantities. The content of Text constructs is Language-Sensitive.
// Specs: https://datatracker.ietf.org/doc/html/rfc4287#section-3.1
// Text: https://datatracker.ietf.org/doc/html/rfc4287#section-3.1.1.1
// HTML: https://datatracker.ietf.org/doc/html/rfc4287#section-3.1.1.2
// XHTML: https://datatracker.ietf.org/doc/html/rfc4287#section-3.1.1.3
type Atom10Text struct {
Type string `xml:"type,attr"`
CharData string `xml:",chardata"`
InnerXML string `xml:",innerxml"`
XHTMLRootElement AtomXHTMLRootElement `xml:"http://www.w3.org/1999/xhtml div"`
}
func (a *Atom10Text) Body() string {
var content string
if strings.EqualFold(a.Type, "xhtml") {
content = a.xhtmlContent()
} else {
content = a.CharData
}
return strings.TrimSpace(content)
}
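// As a rough usage sketch (not an example taken from this repository), unmarshalling
//
//	var title Atom10Text
//	_ = xml.Unmarshal([]byte(`<title type="html">Hello &lt;b&gt;World&lt;/b&gt;</title>`), &title)
//
// leaves CharData set to "Hello <b>World</b>" (entities decoded by encoding/xml)
// and InnerXML set to the raw "Hello &lt;b&gt;World&lt;/b&gt;", so Body() returns the
// HTML fragment unchanged; the xhtml branch is only taken when the type
// attribute is "xhtml".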
func (a *Atom10Text) Title() string {
var content string
switch {
case strings.EqualFold(a.Type, "xhtml"):
content = a.xhtmlContent()
	case strings.Contains(a.InnerXML, "<![CDATA["):