Compare commits


7 Commits

Author SHA1 Message Date
0xsysr3ll
0a7529dc07 refactor(userlist): streamline user sorting logic and improve performance 2025-06-17 20:02:25 +02:00
0xsysr3ll
cbee8fd843 feat(userlist): implement local user sorting and update sorting logic
This prevents useless API calls when changing sorting filters.
2025-06-17 19:39:03 +02:00
0xsysr3ll
b9435427dc refactor(userlist): reorder sorting options to match columns order 2025-06-17 19:39:03 +02:00
0xsysr3ll
8ceec0f9c4 fix(userlist): bring back the sort dropdown
- Restore sort dropdown for better mobile usability
- Add new sort options (Type, Role) to match column header sorting
- Implement sort direction toggle (asc/desc) with intuitive icon indicator
- Fix displayname sorting in backend to properly sort by username/plexUsername
2025-06-17 19:39:03 +02:00
0xsysr3ll
5a1040bb61 fix(userlist): update cypress API intercepts to match user list requests 2025-06-17 19:39:03 +02:00
0xsysr3ll
a97a3f3512 refactor(userlist): remove unused sort messages from User List 2025-06-17 19:39:03 +02:00
0xsysr3ll
1dbacec4f9 feat(userlist): add sortable columns to User List 2025-06-17 19:39:03 +02:00
186 changed files with 7826 additions and 11962 deletions
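The sorting commits listed above (8ceec0f9c4, cbee8fd843, 0a7529dc07) move User List sorting client-side, so changing the sort field or direction reorders the already-fetched list instead of issuing another API request. Below is a minimal TypeScript sketch of that idea; the `UserListItem` shape, the field names, and the `sortUsers` helper are illustrative assumptions rather than Jellyseerr's actual implementation.

```ts
// Sketch of local (client-side) user sorting with an asc/desc toggle.
// All types and field names here are assumptions for illustration only.
type SortField = 'displayname' | 'requests' | 'created' | 'type' | 'role';
type SortDirection = 'asc' | 'desc';

interface UserListItem {
  displayName: string;
  requestCount: number;
  createdAt: string; // ISO timestamp
  userType: string;
  role: string;
}

// One comparator per sortable column, matching the column order.
const comparators: Record<SortField, (a: UserListItem, b: UserListItem) => number> = {
  displayname: (a, b) => a.displayName.localeCompare(b.displayName),
  requests: (a, b) => a.requestCount - b.requestCount,
  created: (a, b) => Date.parse(a.createdAt) - Date.parse(b.createdAt),
  type: (a, b) => a.userType.localeCompare(b.userType),
  role: (a, b) => a.role.localeCompare(b.role),
};

// Sort a copy of the already-fetched list; no API call is needed when the
// sort field or direction changes.
function sortUsers(
  users: UserListItem[],
  field: SortField,
  direction: SortDirection
): UserListItem[] {
  const sorted = [...users].sort(comparators[field]);
  return direction === 'asc' ? sorted : sorted.reverse();
}
```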


@@ -642,24 +642,6 @@
"contributions": [ "contributions": [
"code" "code"
] ]
},
{
"login": "sudo-kraken",
"name": "Joe Harrison",
"avatar_url": "https://avatars.githubusercontent.com/u/53116754?v=4",
"profile": "https://sudo-kraken.github.io/docs/",
"contributions": [
"infra"
]
},
{
"login": "ale183",
"name": "ale183",
"avatar_url": "https://avatars.githubusercontent.com/u/8809439?v=4",
"profile": "https://github.com/ale183",
"contributions": [
"code"
]
} }
] ]
} }


@@ -4,7 +4,6 @@
#### To-Dos #### To-Dos
- [ ] Disclosed any use of AI (see our [policy](https://github.com/fallenbagel/jellyseerr/blob/develop/CONTRIBUTING.md#ai-assistance-notice))
- [ ] Successful build `pnpm build` - [ ] Successful build `pnpm build`
- [ ] Translation keys `pnpm i18n:extract` - [ ] Translation keys `pnpm i18n:extract`
- [ ] Database migration (if required) - [ ] Database migration (if required)


@@ -7,14 +7,6 @@ on:
push: push:
branches: branches:
- develop - develop
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ci-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
test: test:
@@ -25,17 +17,14 @@ jobs:
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Pnpm Setup - name: Pnpm Setup
uses: pnpm/action-setup@v4 uses: pnpm/action-setup@v4
with:
version: 9
- name: Get pnpm store directory - name: Get pnpm store directory
shell: sh shell: sh
run: | run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Setup pnpm cache - name: Setup pnpm cache
uses: actions/cache@v4 uses: actions/cache@v4
with: with:
@@ -43,144 +32,137 @@ jobs:
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: | restore-keys: |
${{ runner.os }}-pnpm-store- ${{ runner.os }}-pnpm-store-
- name: Install dependencies - name: Install dependencies
env: env:
HUSKY: 0 HUSKY: 0
run: pnpm install run: pnpm install
- name: Lint - name: Lint
run: pnpm lint run: pnpm lint
- name: Formatting - name: Formatting
run: pnpm format:check run: pnpm format:check
- name: Build - name: Build
run: pnpm build run: pnpm build
build: build:
name: Build (per-arch, native runners) name: Build & Publish Docker Images
if: github.ref == 'refs/heads/develop' && !contains(github.event.head_commit.message, '[skip ci]') if: github.ref == 'refs/heads/develop' && !contains(github.event.head_commit.message, '[skip ci]')
strategy: strategy:
matrix: matrix:
include: include:
- runner: ubuntu-24.04 - runner: ubuntu-24.04
platform: linux/amd64 platform: linux/amd64
arch: amd64
- runner: ubuntu-24.04-arm - runner: ubuntu-24.04-arm
platform: linux/arm64 platform: linux/arm64
arch: arm64
runs-on: ${{ matrix.runner }} runs-on: ${{ matrix.runner }}
outputs:
digest-amd64: ${{ steps.set_outputs.outputs.digest-amd64 }}
digest-arm64: ${{ steps.set_outputs.outputs.digest-arm64 }}
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: Warm cache (no push) — ${{ matrix.platform }}
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: ${{ matrix.platform }}
push: false
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=develop
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
cache-from: type=gha,scope=${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=${{ matrix.platform }}
provenance: false
publish:
name: Publish multi-arch image
needs: build
runs-on: ubuntu-24.04
permissions:
contents: read
packages: write
id-token: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to Docker Hub - name: Log in to Docker Hub
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
username: ${{ secrets.DOCKER_USERNAME }} username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }} password: ${{ secrets.DOCKER_TOKEN }}
- name: Log in to GitHub Container Registry - name: Log in to GitHub Container Registry
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: Set lower case owner name
- name: Extract metadata run: |
echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
env:
OWNER: ${{ github.repository_owner }}
- name: Docker metadata
id: meta id: meta
uses: docker/metadata-action@v5 uses: docker/metadata-action@v4
with: with:
images: | images: |
${{ github.repository }} fallenbagel/jellyseerr
ghcr.io/${{ github.repository }} ghcr.io/${{ env.OWNER_LC }}/jellyseerr
tags: | tags: |
type=raw,value=develop type=ref,event=branch
type=sha type=sha,prefix=,suffix=,format=short
labels: | - name: Build and push by digest
org.opencontainers.image.created=${{ steps.ts.outputs.TIMESTAMP }} id: build
uses: docker/build-push-action@v5
- name: Build & Push (multi-arch, single tag)
uses: docker/build-push-action@v6
with: with:
context: . context: .
file: ./Dockerfile file: ./Dockerfile
platforms: linux/amd64,linux/arm64 platforms: ${{ matrix.platform }}
push: true push: true
build-args: | build-args: |
COMMIT_TAG=${{ github.sha }} COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=develop BUILD_VERSION=develop
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }} BUILD_DATE=${{ github.event.repository.updated_at }}
labels: ${{ steps.meta.outputs.labels }} outputs: |
tags: ${{ steps.meta.outputs.tags }} type=image,push-by-digest=true,name=fallenbagel/jellyseerr,push=true
cache-from: | type=image,push-by-digest=true,name=ghcr.io/${{ env.OWNER_LC }}/jellyseerr,push=true
type=gha,scope=linux/amd64 cache-from: type=gha,scope=${{ matrix.platform }}
type=gha,scope=linux/arm64 cache-to: type=gha,mode=max,scope=${{ matrix.platform }}
cache-to: type=gha,mode=max
provenance: false provenance: false
- name: Set outputs
id: set_outputs
run: |
platform="${{ matrix.platform == 'linux/amd64' && 'amd64' || 'arm64' }}"
echo "digest-${platform}=${{ steps.build.outputs.digest }}" >> $GITHUB_OUTPUT
merge_and_push:
name: Create and Push Multi-arch Manifest
needs: build
runs-on: ubuntu-24.04
steps:
- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set lower case owner name
run: |
echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
env:
OWNER: ${{ github.repository_owner }}
- name: Create and push manifest
run: |
docker manifest create fallenbagel/jellyseerr:develop \
--amend fallenbagel/jellyseerr@${{ needs.build.outputs.digest-amd64 }} \
--amend fallenbagel/jellyseerr@${{ needs.build.outputs.digest-arm64 }}
docker manifest push fallenbagel/jellyseerr:develop
# GHCR manifest
docker manifest create ghcr.io/${{ env.OWNER_LC }}/jellyseerr:develop \
--amend ghcr.io/${{ env.OWNER_LC }}/jellyseerr@${{ needs.build.outputs.digest-amd64 }} \
--amend ghcr.io/${{ env.OWNER_LC }}/jellyseerr@${{ needs.build.outputs.digest-arm64 }}
docker manifest push ghcr.io/${{ env.OWNER_LC }}/jellyseerr:develop
discord: discord:
name: Send Discord Notification name: Send Discord Notification
needs: publish needs: merge_and_push
if: always() && github.event_name != 'pull_request' && !contains(github.event.head_commit.message, '[skip ci]') if: always() && github.event_name != 'pull_request' && !contains(github.event.head_commit.message, '[skip ci]')
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
steps: steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v3
- name: Combine Job Status - name: Combine Job Status
id: status id: status
run: | run: |
failures=(neutral, skipped, timed_out, action_required) failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ ${{ needs.publish.result }} ]]; then if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo "status=failure" >> $GITHUB_OUTPUT echo "status=failure" >> $GITHUB_OUTPUT
else else
echo "status=${{ needs.publish.result }}" >> $GITHUB_OUTPUT echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
fi fi
- name: Post Status to Discord - name: Post Status to Discord
uses: sarisia/actions-status-discord@v1 uses: sarisia/actions-status-discord@v1
with: with:


@@ -3,52 +3,39 @@ name: 'CodeQL'
on: on:
push: push:
branches: ['develop'] branches: ['develop']
paths-ignore:
- '**/*.md'
- 'docs/**'
pull_request: pull_request:
branches: ['develop'] branches: ['develop']
paths-ignore:
- '**/*.md'
- 'docs/**'
schedule: schedule:
- cron: '50 7 * * 5' - cron: '50 7 * * 5'
permissions:
contents: read
concurrency:
group: codeql-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
analyze: analyze:
name: Analyze name: Analyze
runs-on: ubuntu-24.04 runs-on: ubuntu-latest
timeout-minutes: 10
permissions: permissions:
actions: read
contents: read contents: read
security-events: write security-events: write
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
language: [actions, javascript] language: [javascript]
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Initialize CodeQL - name: Initialize CodeQL
uses: github/codeql-action/init@v3 uses: github/codeql-action/init@v2
with: with:
languages: ${{ matrix.language }} languages: ${{ matrix.language }}
queries: +security-and-quality queries: +security-and-quality
- name: Autobuild - name: Autobuild
uses: github/codeql-action/autobuild@v3 uses: github/codeql-action/autobuild@v2
- name: Perform CodeQL Analysis - name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3 uses: github/codeql-action/analyze@v2
with: with:
category: '/language:${{ matrix.language }}' category: '/language:${{ matrix.language }}'


@@ -2,24 +2,18 @@ name: Merge Conflict Labeler
on: on:
push: push:
branches: [develop] branches:
- develop
pull_request_target: pull_request_target:
branches: [develop] branches:
types: [opened, synchronize, reopened] - develop
types: [synchronize]
permissions:
contents: read
concurrency:
group: merge-conflict-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
label: label:
name: Labeling name: Labeling
runs-on: ubuntu-24.04 runs-on: ubuntu-latest
timeout-minutes: 10 if: ${{ github.repository == 'Fallenbagel/jellyseerr' }}
permissions: permissions:
contents: read contents: read
pull-requests: write pull-requests: write


@@ -2,49 +2,26 @@ name: Cypress Tests
on: on:
pull_request: pull_request:
branches: ['*'] branches:
paths-ignore: - '*'
- '**/*.md'
- 'docs/**'
push: push:
branches: [develop] branches:
paths-ignore: - develop
- '**/*.md'
- 'docs/**'
permissions:
contents: read
concurrency:
group: cypress-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
cypress-run: cypress-run:
name: Cypress Run runs-on: ubuntu-latest
runs-on: ubuntu-24.04
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Node.js - name: Set up Node.js
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version-file: package.json node-version: 22
- name: Pnpm Setup - name: Pnpm Setup
uses: pnpm/action-setup@v4 uses: pnpm/action-setup@v4
- name: Setup cypress cache
uses: actions/cache@v4
with: with:
path: ~/.cache/Cypress version: 9
key: ${{ runner.os }}-cypress-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-cypress-store-
- name: Cypress run - name: Cypress run
uses: cypress-io/github-action@v6 uses: cypress-io/github-action@v6
with: with:
@@ -59,7 +36,6 @@ jobs:
# Fix test titles in cypress dashboard # Fix test titles in cypress dashboard
COMMIT_INFO_MESSAGE: ${{github.event.pull_request.title}} COMMIT_INFO_MESSAGE: ${{github.event.pull_request.title}}
COMMIT_INFO_SHA: ${{github.event.pull_request.head.sha}} COMMIT_INFO_SHA: ${{github.event.pull_request.head.sha}}
- name: Upload video files - name: Upload video files
if: always() if: always()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v4


@@ -8,30 +8,24 @@ on:
- 'docs/**' - 'docs/**'
- 'gen-docs/**' - 'gen-docs/**'
permissions:
contents: read
concurrency:
group: pages
cancel-in-progress: true
jobs: jobs:
build: build:
name: Build Docusaurus name: Build Docusaurus
runs-on: ubuntu-24.04 runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
persist-credentials: false
- name: Set up Node.js - name: Set up Node.js
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version-file: package.json node-version: 20
- name: Pnpm Setup - name: Pnpm Setup
uses: pnpm/action-setup@v4 uses: pnpm/action-setup@v4
with:
version: 9
- name: Get pnpm store directory - name: Get pnpm store directory
shell: sh shell: sh
@@ -52,26 +46,38 @@ jobs:
pnpm install --frozen-lockfile pnpm install --frozen-lockfile
- name: Build website - name: Build website
working-directory: gen-docs run: |
run: pnpm build cd gen-docs
pnpm build
- name: Upload Build Artifact - name: Upload Build Artifact
uses: actions/upload-pages-artifact@v4 uses: actions/upload-pages-artifact@v3
with: with:
path: gen-docs/build path: gen-docs/build
deploy: deploy:
name: Deploy to GitHub Pages name: Deploy to GitHub Pages
needs: build needs: build
runs-on: ubuntu-24.04 concurrency: build-deploy-pages
# Grant GITHUB_TOKEN the permissions required to make a Pages deployment
permissions: permissions:
contents: read pages: write # to deploy to Pages
pages: write id-token: write # to verify the deployment originates from an appropriate source
id-token: write
# Deploy to the github-pages environment
environment: environment:
name: github-pages name: github-pages
url: ${{ steps.deployment.outputs.page_url }} url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps: steps:
# - name: Download Build Artifact
# uses: actions/download-artifact@v4
# with:
# name: docusaurus-build
# path: gen-docs/build
- name: Deploy to GitHub Pages - name: Deploy to GitHub Pages
id: deployment id: deployment
uses: actions/deploy-pages@v4 uses: actions/deploy-pages@v4


@@ -4,21 +4,11 @@ on:
push: push:
branches: branches:
- develop - develop
paths:
- 'charts/**'
- '.github/workflows/release-charts.yml'
permissions:
contents: read
concurrency:
group: helm-charts
cancel-in-progress: true
jobs: jobs:
package-helm-chart: package-helm-chart:
name: Package helm chart name: Package helm chart
runs-on: ubuntu-24.04 runs-on: ubuntu-latest
permissions: permissions:
contents: read contents: read
packages: read packages: read
@@ -29,7 +19,6 @@ jobs:
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
persist-credentials: false
- name: Install helm - name: Install helm
uses: azure/setup-helm@v4 uses: azure/setup-helm@v4
@@ -53,11 +42,16 @@ jobs:
# get current version # get current version
current_version=$(grep '^version:' "$chart_path/Chart.yaml" | awk '{print $2}') current_version=$(grep '^version:' "$chart_path/Chart.yaml" | awk '{print $2}')
# try to get current release version # try to get current release version
if oras manifest fetch "ghcr.io/${GITHUB_REPOSITORY@L}/${chart_name}:${current_version}" >/dev/null 2>&1; then set +e
echo "No version change for $chart_name. Skipping." oras discover ghcr.io/${GITHUB_REPOSITORY@L}/${chart_name}:${current_version}
else oras_exit_code=$?
set -e
if [ $oras_exit_code -ne 0 ]; then
helm dependency build "$chart_path" helm dependency build "$chart_path"
helm package "$chart_path" --destination ./.cr-release-packages helm package "$chart_path" --destination ./.cr-release-packages
else
echo "No version change for $chart_name. Skipping."
fi fi
else else
echo "Skipping $chart_name: Not a valid Helm chart" echo "Skipping $chart_name: Not a valid Helm chart"
@@ -67,7 +61,7 @@ jobs:
- name: Check if artifacts exist - name: Check if artifacts exist
id: check-artifacts id: check-artifacts
run: | run: |
if ls .cr-release-packages/*.tgz >/dev/null 2>&1; then if ls .cr-release-packages/* >/dev/null 2>&1; then
echo "has_artifacts=true" >> $GITHUB_OUTPUT echo "has_artifacts=true" >> $GITHUB_OUTPUT
else else
echo "has_artifacts=false" >> $GITHUB_OUTPUT echo "has_artifacts=false" >> $GITHUB_OUTPUT
@@ -83,7 +77,7 @@ jobs:
publish: publish:
name: Publish to ghcr.io name: Publish to ghcr.io
runs-on: ubuntu-24.04 runs-on: ubuntu-latest
permissions: permissions:
packages: write # needed for pushing to github registry packages: write # needed for pushing to github registry
id-token: write # needed for signing the images with GitHub OIDC Token id-token: write # needed for signing the images with GitHub OIDC Token
@@ -94,7 +88,6 @@ jobs:
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
persist-credentials: false
- name: Install helm - name: Install helm
uses: azure/setup-helm@v4 uses: azure/setup-helm@v4


@@ -7,48 +7,27 @@ on:
paths: paths:
- '.github/workflows/lint-helm-charts.yml' - '.github/workflows/lint-helm-charts.yml'
- 'charts/**' - 'charts/**'
push:
branches: [develop]
paths:
- 'charts/**'
permissions:
contents: read
concurrency:
group: charts-lint-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
lint-test: lint-test:
runs-on: ubuntu-24.04 runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
persist-credentials: false
- name: Set up Helm - name: Set up Helm
uses: azure/setup-helm@v4 uses: azure/setup-helm@v4.2.0
- name: Set up chart-testing
uses: helm/chart-testing-action@v2
- name: Ensure documentation is updated - name: Ensure documentation is updated
uses: docker://jnorwood/helm-docs:v1.14.2 uses: docker://jnorwood/helm-docs:v1.14.2
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.6.1
- name: Run chart-testing (list-changed) - name: Run chart-testing (list-changed)
id: list-changed id: list-changed
run: | run: |
changed=$(ct list-changed --target-branch ${{ github.event.repository.default_branch }}) changed=$(ct list-changed --target-branch ${{ github.event.repository.default_branch }})
if [[ -n "$changed" ]]; then if [[ -n "$changed" ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT" echo "changed=true" >> "$GITHUB_OUTPUT"
echo "$changed"
fi fi
- name: Run chart-testing - name: Run chart-testing
if: steps.list-changed.outputs.changed == 'true' if: steps.list-changed.outputs.changed == 'true'
run: ct lint --target-branch ${{ github.event.repository.default_branch }} --validate-maintainers=false run: ct lint --target-branch ${{ github.event.repository.default_branch }} --validate-maintainers=false


@@ -4,125 +4,28 @@ on:
push: push:
tags: tags:
- 'preview-*' - 'preview-*'
workflow_dispatch:
permissions:
contents: read
concurrency:
group: preview-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
build: build_and_push:
name: Build (per-arch, native runners) name: Build & Publish Docker Preview Images
strategy: runs-on: ubuntu-22.04
matrix:
include:
- runner: ubuntu-24.04
platform: linux/amd64
arch: amd64
- runner: ubuntu-24.04-arm
platform: linux/arm64
arch: arm64
runs-on: ${{ matrix.runner }}
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
with: - name: Get the version
persist-credentials: false id: get_version
run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
- name: Commit timestamp - name: Set up QEMU
id: ts uses: docker/setup-qemu-action@v3
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: Derive preview version from tag
id: ver
shell: bash
run: |
TAG="${GITHUB_REF_NAME}"
VER="${TAG#preview-}"
VER="${VER#v}"
echo "version=${VER}" >> "$GITHUB_OUTPUT"
echo "Building preview version: ${VER}"
- name: Warm cache (no push) — ${{ matrix.platform }}
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: ${{ matrix.platform }}
push: false
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=${{ steps.ver.outputs.version }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
cache-from: type=gha,scope=${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=${{ matrix.platform }}
provenance: false
publish:
name: Publish multi-arch image
needs: build
runs-on: ubuntu-24.04
permissions:
contents: read
packages: write
id-token: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to Docker Hub - name: Log in to Docker Hub
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
username: ${{ secrets.DOCKER_USERNAME }} username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }} password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push
- name: Log in to GitHub Container Registry uses: docker/build-push-action@v5
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Derive preview version from tag
id: ver
shell: bash
run: |
TAG="${GITHUB_REF_NAME}"
VER="${TAG#preview-}"
VER="${VER#v}"
echo "version=${VER}" >> "$GITHUB_OUTPUT"
echo "Publishing preview version: ${VER}"
- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ github.repository }}
ghcr.io/${{ github.repository }}
tags: |
type=raw,value=preview-${{ steps.ver.outputs.version }}
labels: |
org.opencontainers.image.version=preview-${{ steps.ver.outputs.version }}
org.opencontainers.image.created=${{ steps.ts.outputs.TIMESTAMP }}
- name: Build & Push (multi-arch, single tag)
uses: docker/build-push-action@v6
with: with:
context: . context: .
file: ./Dockerfile file: ./Dockerfile
@@ -130,12 +33,7 @@ jobs:
push: true push: true
build-args: | build-args: |
COMMIT_TAG=${{ github.sha }} COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=${{ steps.ver.outputs.version }} BUILD_VERSION=${{ steps.get_version.outputs.VERSION }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }} BUILD_DATE=${{ github.event.repository.updated_at }}
labels: ${{ steps.meta.outputs.labels }} tags: |
tags: ${{ steps.meta.outputs.tags }} fallenbagel/jellyseerr:${{ steps.get_version.outputs.VERSION }}
cache-from: |
type=gha,scope=linux/amd64
type=gha,scope=linux/arm64
cache-to: type=gha,mode=max
provenance: false


@@ -1,14 +1,6 @@
name: Jellyseerr Release name: Jellyseer Release
on: on: workflow_dispatch
workflow_dispatch:
permissions:
contents: read
concurrency:
group: release-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
semantic-release: semantic-release:
@@ -16,29 +8,38 @@ jobs:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
env: env:
HUSKY: 0 HUSKY: 0
outputs:
new_release_published: ${{ steps.release.outputs.new_release_published }}
new_release_version: ${{ steps.release.outputs.new_release_version }}
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
persist-credentials: false
- name: Set up Node.js - name: Set up Node.js
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version-file: package.json node-version: 22
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GH_TOKEN }}
- name: Pnpm Setup - name: Pnpm Setup
uses: pnpm/action-setup@v4 uses: pnpm/action-setup@v4
with:
version: 9
- name: Get pnpm store directory - name: Get pnpm store directory
shell: sh shell: sh
run: | run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Setup pnpm cache - name: Setup pnpm cache
uses: actions/cache@v4 uses: actions/cache@v4
with: with:
@@ -46,151 +47,77 @@ jobs:
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: | restore-keys: |
${{ runner.os }}-pnpm-store- ${{ runner.os }}-pnpm-store-
- name: Install dependencies - name: Install dependencies
run: pnpm install run: pnpm install
- name: Release - name: Release
id: release
uses: cycjimmy/semantic-release-action@v5
with:
extra_plugins: |
@semantic-release/git@10
@semantic-release/changelog@6
@codedependant/semantic-release-docker@5
env: env:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: npx semantic-release
build: # build-snap:
name: Build (per-arch, native runners) # name: Build Snap Package (${{ matrix.architecture }})
needs: semantic-release # needs: semantic-release
if: needs.semantic-release.outputs.new_release_published == 'true' # runs-on: ubuntu-22.04
strategy: # strategy:
matrix: # fail-fast: false
include: # matrix:
- runner: ubuntu-24.04 # architecture:
platform: linux/amd64 # - amd64
arch: amd64 # - arm64
- runner: ubuntu-24.04-arm # steps:
platform: linux/arm64 # - name: Checkout Code
arch: arm64 # uses: actions/checkout@v4
runs-on: ${{ matrix.runner }} # with:
steps: # fetch-depth: 0
- name: Checkout # - name: Switch to main branch
uses: actions/checkout@v4 # run: git checkout main
with: # - name: Pull latest changes
persist-credentials: false # run: git pull
# - name: Prepare
- name: Commit timestamp # id: prepare
id: ts # run: |
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT" # git fetch --prune --tags
# if [[ $GITHUB_REF == refs/tags/* || $GITHUB_REF == refs/heads/master ]]; then
- name: Set up Docker Buildx # echo "RELEASE=stable" >> $GITHUB_OUTPUT
uses: docker/setup-buildx-action@v3 # else
# echo "RELEASE=edge" >> $GITHUB_OUTPUT
- name: Warm cache (no push) — ${{ matrix.platform }} # fi
uses: docker/build-push-action@v6 # - name: Set Up QEMU
with: # uses: docker/setup-qemu-action@v3
context: . # with:
file: ./Dockerfile # image: tonistiigi/binfmt@sha256:df15403e06a03c2f461c1f7938b171fda34a5849eb63a70e2a2109ed5a778bde
platforms: ${{ matrix.platform }} # - name: Build Snap Package
push: false # uses: diddlesnaps/snapcraft-multiarch-action@v1
build-args: | # id: build
COMMIT_TAG=${{ github.sha }} # with:
BUILD_VERSION=${{ needs.semantic-release.outputs.new_release_version }} # architecture: ${{ matrix.architecture }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }} # - name: Upload Snap Package
cache-from: type=gha,scope=${{ matrix.platform }} # uses: actions/upload-artifact@v4
cache-to: type=gha,mode=max,scope=${{ matrix.platform }} # with:
provenance: false # name: jellyseerr-snap-package-${{ matrix.architecture }}
# path: ${{ steps.build.outputs.snap }}
publish: # - name: Review Snap Package
name: Publish multi-arch image # uses: diddlesnaps/snapcraft-review-tools-action@v1
needs: [semantic-release, build] # with:
if: needs.semantic-release.outputs.new_release_published == 'true' # snap: ${{ steps.build.outputs.snap }}
runs-on: ubuntu-24.04 # - name: Publish Snap Package
permissions: # uses: snapcore/action-publish@v1
contents: read # env:
id-token: write # SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.SNAP_LOGIN }}
packages: write # with:
steps: # snap: ${{ steps.build.outputs.snap }}
- name: Checkout # release: ${{ steps.prepare.outputs.RELEASE }}
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ github.repository }}
ghcr.io/${{ github.repository }}
tags: |
type=raw,value=${{ needs.semantic-release.outputs.new_release_version }}
labels: |
org.opencontainers.image.created=${{ steps.ts.outputs.TIMESTAMP }}
- name: Build & Push (multi-arch, single tag)
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: true
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=${{ needs.semantic-release.outputs.new_release_version }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
labels: ${{ steps.meta.outputs.labels }}
tags: ${{ steps.meta.outputs.tags }}
cache-from: |
type=gha,scope=linux/amd64
type=gha,scope=linux/arm64
cache-to: type=gha,mode=max
provenance: false
- name: Also tag :latest (non-pre-release only)
shell: bash
run: |
VER="${{ needs.semantic-release.outputs.new_release_version }}"
if [[ "$VER" != *"-"* ]]; then
docker buildx imagetools create \
-t ${{ github.repository }}:latest \
${{ github.repository }}:${VER}
docker buildx imagetools create \
-t ghcr.io/${{ github.repository }}:latest \
ghcr.io/${{ github.repository }}:${VER}
fi
discord: discord:
name: Send Discord Notification name: Send Discord Notification
needs: publish needs: semantic-release
if: always() if: always()
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
steps: steps:
- name: Get Build Job Status - name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v3 uses: technote-space/workflow-conclusion-action@v3
- name: Combine Job Status - name: Combine Job Status
id: status id: status
run: | run: |
@@ -200,7 +127,6 @@ jobs:
else else
echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
fi fi
- name: Post Status to Discord - name: Post Status to Discord
uses: sarisia/actions-status-discord@v1 uses: sarisia/actions-status-discord@v1
with: with:

.github/workflows/snap.yaml.disabled (new file, 94 lines)

@@ -0,0 +1,94 @@
name: Publish Snap
# turn off edge snap builds temporarily and make it manual
# on:
# push:
# branches:
# - develop
on: workflow_dispatch
jobs:
jobs:
name: Job Check
runs-on: ubuntu-22.04
if: "!contains(github.event.head_commit.message, '[skip ci]')"
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.12.1
with:
access_token: ${{ secrets.GITHUB_TOKEN }}
build-snap:
name: Build Snap Package (${{ matrix.architecture }})
needs: jobs
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
architecture:
- amd64
- arm64
steps:
- name: Checkout Code
uses: actions/checkout@v4
- name: Prepare
id: prepare
run: |
git fetch --prune --unshallow --tags
if [[ $GITHUB_REF == refs/tags/* || $GITHUB_REF == refs/heads/master ]]; then
echo "RELEASE=stable" >> $GITHUB_OUTPUT
else
echo "RELEASE=edge" >> $GITHUB_OUTPUT
fi
- name: Set Up QEMU
uses: docker/setup-qemu-action@v3
- name: Configure Git
run: git config --add safe.directory /data/parts/jellyseerr/src
- name: Build Snap Package
uses: diddlesnaps/snapcraft-multiarch-action@v1
id: build
with:
architecture: ${{ matrix.architecture }}
- name: Upload Snap Package
uses: actions/upload-artifact@v4
with:
name: jellyseerr-snap-package-${{ matrix.architecture }}
path: ${{ steps.build.outputs.snap }}
- name: Review Snap Package
uses: diddlesnaps/snapcraft-review-tools-action@v1
with:
snap: ${{ steps.build.outputs.snap }}
- name: Publish Snap Package
uses: snapcore/action-publish@v1
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.SNAP_LOGIN }}
with:
snap: ${{ steps.build.outputs.snap }}
release: ${{ steps.prepare.outputs.RELEASE }}
discord:
name: Send Discord Notification
needs: build-snap
if: always() && !contains(github.event.head_commit.message, '[skip ci]')
runs-on: ubuntu-22.04
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v3
- name: Combine Job Status
id: status
run: |
failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo "status=failure" >> $GITHUB_OUTPUT
else
echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
fi
- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ steps.status.outputs.status }}
title: ${{ github.workflow }}
nofail: true


@@ -4,53 +4,22 @@ on:
issues: issues:
types: [labeled, unlabeled, reopened] types: [labeled, unlabeled, reopened]
permissions:
issues: read
concurrency:
group: support-${{ github.event.issue.number }}
cancel-in-progress: true
jobs: jobs:
support: support:
if: github.event.label.name == 'support' || github.event.action == 'reopened' runs-on: ubuntu-latest
runs-on: ubuntu-24.04
permissions:
issues: write
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
NUMBER: ${{ github.event.issue.number }}
ISSUE_AUTHOR: ${{ github.event.issue.user.login }}
steps: steps:
- name: Label added, comment and close issue - uses: dessant/support-requests@v4
if: github.event.action == 'labeled' && github.event.label.name == 'support' with:
shell: bash github-token: ${{ github.token }}
env: support-label: 'support'
BODY: > issue-comment: >
:wave: @${{ env.ISSUE_AUTHOR }}, we use the issue tracker exclusively :wave: @{issue-author}, we use the issue tracker exclusively
for bug reports and feature requests. However, this issue appears for bug reports and feature requests. However, this issue appears
to be a support request. Please use our support channels to be a support request. Please use our support channels
to get help with Jellyseerr. to get help with Jellyseerr.
- [Discord](https://discord.gg/ckbvBtDJgC) - [Discord](https://discord.gg/ckbvBtDJgC)
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh issue comment "$NUMBER" -R "$GH_REPO" -b "$BODY" || true
retry gh issue close "$NUMBER" -R "$GH_REPO" || true
gh issue lock "$NUMBER" -R "$GH_REPO" -r "off_topic" || true
- name: Reopened or label removed, unlock issue close-issue: true
if: github.event.action == 'unlabeled' && github.event.label.name == 'support' lock-issue: true
shell: bash issue-lock-reason: 'off-topic'
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh issue reopen "$NUMBER" -R "$GH_REPO" || true
gh issue unlock "$NUMBER" -R "$GH_REPO" || true
- name: Remove support label on manual reopen
if: github.event.action == 'reopened'
shell: bash
run: |
gh issue edit "$NUMBER" -R "$GH_REPO" --remove-label "support" || true
gh issue unlock "$NUMBER" -R "$GH_REPO" || true


@@ -8,32 +8,24 @@ on:
- 'docs/**' - 'docs/**'
- 'gen-docs/**' - 'gen-docs/**'
permissions:
contents: read
concurrency:
group: docs-pr-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
test-deploy: test-deploy:
name: Test deployment name: Test deployment
runs-on: ubuntu-24.04 runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
persist-credentials: false
- name: Set up Node.js - name: Set up Node.js
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version-file: package.json node-version: 20
- name: Pnpm Setup - name: Pnpm Setup
uses: pnpm/action-setup@v4 uses: pnpm/action-setup@v4
with:
version: 9
- name: Get pnpm store directory - name: Get pnpm store directory
shell: sh shell: sh
@@ -50,7 +42,7 @@ jobs:
- name: Install dependencies - name: Install dependencies
run: | run: |
cd gen-docs cd gen-docs
pnpm install --frozen-lockfile pnpm install --frozen-lockfile
- name: Build website - name: Build website


@@ -4,16 +4,11 @@ dist/
config/ config/
CHANGELOG.md CHANGELOG.md
pnpm-lock.yaml pnpm-lock.yaml
cypress/config/settings.cypress.json
# assets # assets
src/assets/ src/assets/
public/ public/
!public/sw.js
docs/ docs/
!/public/
/public/*
!/public/sw.js
# helm charts # helm charts
**/charts **/charts


@@ -21,11 +21,5 @@ module.exports = {
rangeEnd: 0, // default: Infinity rangeEnd: 0, // default: Infinity
}, },
}, },
{
files: 'cypress/config/settings.cypress.json',
options: {
rangeEnd: 0,
},
},
], ],
}; };


@@ -20,8 +20,5 @@
"files.associations": { "files.associations": {
"globals.css": "tailwindcss" "globals.css": "tailwindcss"
}, },
"i18n-ally.localesPaths": [ "i18n-ally.localesPaths": ["src/i18n/locale"]
"src/i18n/locale"
],
"yaml.format.singleQuote": true
} }


@@ -2,45 +2,6 @@
All help is welcome and greatly appreciated! If you would like to contribute to the project, the following instructions should get you started... All help is welcome and greatly appreciated! If you would like to contribute to the project, the following instructions should get you started...
## AI Assistance Notice
> [!IMPORTANT]
>
> If you are using **any kind of AI assistance** to contribute to Jellyseerr,
> it must be disclosed in the pull request.
If you are using any kind of AI assistance while contributing to Jellyseerr,
**this must be disclosed in the pull request**, along with the extent to
which AI assistance was used (e.g. docs only vs. code generation).
If PR responses are being generated by an AI, disclose that as well.
As a small exception, trivial tab-completion doesn't need to be disclosed,
so long as it is limited to single keywords or short phrases.
An example disclosure:
> This PR was written primarily by Claude Code.
Or a more detailed disclosure:
> I consulted ChatGPT to understand the codebase but the solution
> was fully authored manually by myself.
Failure to disclose this is first and foremost rude to the human operators
on the other end of the pull request, but it also makes it difficult to
determine how much scrutiny to apply to the contribution.
In a perfect world, AI assistance would produce equal or higher quality
work than any human. That isn't the world we live in today, and in most cases
it generates slop. I say this despite being a fan of these tools and using them
successfully myself (with heavy supervision)!
When using AI assistance, we expect contributors to understand the code
that is produced and be able to answer critical questions about it. It
isn't a maintainer's job to review a PR so broken that it requires
significant rework to be acceptable.
Please be respectful to maintainers and disclose AI assistance.
## Development ## Development
### Tools Required ### Tools Required
@@ -197,4 +158,4 @@ DB_TYPE="postgres" DB_USER=postgres DB_PASS=postgres pnpm migration:generate ser
## Attribution ## Attribution
This contribution guide was inspired by the [Next.js](https://github.com/vercel/next.js), [Radarr](https://github.com/Radarr/Radarr), [Overseerr](https://github.com/sct/Overseerr) and [Ghostty](https://github.com/ghostty-org/ghostty) contribution guides. This contribution guide was inspired by the [Next.js](https://github.com/vercel/next.js), [Radarr](https://github.com/Radarr/Radarr), and [Overseerr](https://github.com/sct/Overseerr) contribution guides.


@@ -2,11 +2,8 @@ FROM node:22-alpine AS BUILD_IMAGE
WORKDIR /app WORKDIR /app
ARG SOURCE_DATE_EPOCH
ARG TARGETPLATFORM ARG TARGETPLATFORM
ARG COMMIT_TAG
ENV TARGETPLATFORM=${TARGETPLATFORM:-linux/amd64} ENV TARGETPLATFORM=${TARGETPLATFORM:-linux/amd64}
ENV COMMIT_TAG=${COMMIT_TAG}
RUN \ RUN \
case "${TARGETPLATFORM}" in \ case "${TARGETPLATFORM}" in \
@@ -17,27 +14,47 @@ RUN \
;; \ ;; \
esac esac
RUN npm install --global pnpm@10 RUN npm install --global pnpm@9
COPY package.json pnpm-lock.yaml postinstall-win.js ./ COPY package.json pnpm-lock.yaml postinstall-win.js ./
RUN CYPRESS_INSTALL_BINARY=0 pnpm install --frozen-lockfile RUN CYPRESS_INSTALL_BINARY=0 pnpm install --frozen-lockfile
COPY . ./ COPY . ./
ARG COMMIT_TAG
ENV COMMIT_TAG=${COMMIT_TAG}
RUN pnpm build RUN pnpm build
# remove development dependencies # remove development dependencies
RUN pnpm prune --prod --ignore-scripts && \ RUN pnpm prune --prod --ignore-scripts
rm -rf src server .next/cache charts gen-docs docs && \
touch config/DOCKER && \ RUN rm -rf src server .next/cache charts gen-docs docs
echo "{\"commitTag\": \"${COMMIT_TAG}\"}" > committag.json
RUN touch config/DOCKER
RUN echo "{\"commitTag\": \"${COMMIT_TAG}\"}" > committag.json
FROM node:22-alpine FROM node:22-alpine
# OCI Meta information
ARG BUILD_DATE
ARG BUILD_VERSION
LABEL \
org.opencontainers.image.authors="Fallenbagel" \
org.opencontainers.image.source="https://github.com/fallenbagel/jellyseerr" \
org.opencontainers.image.created=${BUILD_DATE} \
org.opencontainers.image.version=${BUILD_VERSION} \
org.opencontainers.image.title="Jellyseerr" \
org.opencontainers.image.description="Open-source media request and discovery manager for Jellyfin, Plex, and Emby." \
org.opencontainers.image.licenses="MIT"
WORKDIR /app WORKDIR /app
RUN apk add --no-cache tzdata tini && rm -rf /tmp/* RUN apk add --no-cache tzdata tini && rm -rf /tmp/*
RUN npm install -g pnpm@10 RUN npm install -g pnpm@9
# copy from build image # copy from build image
COPY --from=BUILD_IMAGE /app ./ COPY --from=BUILD_IMAGE /app ./


@@ -3,7 +3,7 @@ FROM node:22-alpine
COPY . /app COPY . /app
WORKDIR /app WORKDIR /app
RUN npm install --global pnpm@10 RUN npm install --global pnpm@9
RUN pnpm install RUN pnpm install


@@ -11,7 +11,7 @@
<a href="http://translate.jellyseerr.dev/engage/jellyseerr/"><img src="http://translate.jellyseerr.dev/widget/jellyseerr/jellyseerr-frontend/svg-badge.svg" alt="Translation status" /></a> <a href="http://translate.jellyseerr.dev/engage/jellyseerr/"><img src="http://translate.jellyseerr.dev/widget/jellyseerr/jellyseerr-frontend/svg-badge.svg" alt="Translation status" /></a>
<a href="https://github.com/fallenbagel/jellyseerr/blob/develop/LICENSE"><img alt="GitHub" src="https://img.shields.io/github/license/fallenbagel/jellyseerr"></a> <a href="https://github.com/fallenbagel/jellyseerr/blob/develop/LICENSE"><img alt="GitHub" src="https://img.shields.io/github/license/fallenbagel/jellyseerr"></a>
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section --> <!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
<a href="#contributors-"><img alt="All Contributors" src="https://img.shields.io/badge/all_contributors-71-orange.svg"/></a> <a href="#contributors-"><img alt="All Contributors" src="https://img.shields.io/badge/all_contributors-69-orange.svg"/></a>
<!-- ALL-CONTRIBUTORS-BADGE:END --> <!-- ALL-CONTRIBUTORS-BADGE:END -->
**Jellyseerr** is a free and open source software application for managing requests for your media library. It integrates with the media server of your choice: [Jellyfin](https://jellyfin.org), [Plex](https://plex.tv), and [Emby](https://emby.media/). In addition, it integrates with your existing services, such as **[Sonarr](https://sonarr.tv/)**, **[Radarr](https://radarr.video/)**. **Jellyseerr** is a free and open source software application for managing requests for your media library. It integrates with the media server of your choice: [Jellyfin](https://jellyfin.org), [Plex](https://plex.tv), and [Emby](https://emby.media/). In addition, it integrates with your existing services, such as **[Sonarr](https://sonarr.tv/)**, **[Radarr](https://radarr.video/)**.
@@ -173,10 +173,6 @@ Thanks goes to these wonderful people from Overseerr ([emoji key](https://allcon
<td align="center" valign="top" width="14.28%"><a href="https://github.com/JamsRepos"><img src="https://avatars.githubusercontent.com/u/1347620?v=4?s=100" width="100px;" alt="Jam"/><br /><sub><b>Jam</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=JamsRepos" title="Code">💻</a></td> <td align="center" valign="top" width="14.28%"><a href="https://github.com/JamsRepos"><img src="https://avatars.githubusercontent.com/u/1347620?v=4?s=100" width="100px;" alt="Jam"/><br /><sub><b>Jam</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=JamsRepos" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="http://www.joelowrance.com"><img src="https://avatars.githubusercontent.com/u/63176?v=4?s=100" width="100px;" alt="Joe Lowrance"/><br /><sub><b>Joe Lowrance</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=joelowrance" title="Code">💻</a></td> <td align="center" valign="top" width="14.28%"><a href="http://www.joelowrance.com"><img src="https://avatars.githubusercontent.com/u/63176?v=4?s=100" width="100px;" alt="Joe Lowrance"/><br /><sub><b>Joe Lowrance</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=joelowrance" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/0xSysR3ll"><img src="https://avatars.githubusercontent.com/u/31414959?v=4?s=100" width="100px;" alt="0xsysr3ll"/><br /><sub><b>0xsysr3ll</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=0xSysR3ll" title="Code">💻</a></td> <td align="center" valign="top" width="14.28%"><a href="https://github.com/0xSysR3ll"><img src="https://avatars.githubusercontent.com/u/31414959?v=4?s=100" width="100px;" alt="0xsysr3ll"/><br /><sub><b>0xsysr3ll</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=0xSysR3ll" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://sudo-kraken.github.io/docs/"><img src="https://avatars.githubusercontent.com/u/53116754?v=4?s=100" width="100px;" alt="Joe Harrison"/><br /><sub><b>Joe Harrison</b></sub></a><br /><a href="#infra-sudo-kraken" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
</tr>
<tr>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/ale183"><img src="https://avatars.githubusercontent.com/u/8809439?v=4?s=100" width="100px;" alt="ale183"/><br /><sub><b>ale183</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=ale183" title="Code">💻</a></td>
</tr> </tr>
</tbody> </tbody>
</table> </table>


@@ -3,8 +3,8 @@ kubeVersion: ">=1.23.0-0"
name: jellyseerr-chart name: jellyseerr-chart
description: Jellyseerr helm chart for Kubernetes description: Jellyseerr helm chart for Kubernetes
type: application type: application
version: 2.7.0 version: 2.5.0
appVersion: "2.7.3" appVersion: "2.6.0"
maintainers: maintainers:
- name: Jellyseerr - name: Jellyseerr
url: https://github.com/Fallenbagel/jellyseerr url: https://github.com/Fallenbagel/jellyseerr


@@ -1,6 +1,6 @@
# jellyseerr-chart # jellyseerr-chart
![Version: 2.7.0](https://img.shields.io/badge/Version-2.7.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 2.7.3](https://img.shields.io/badge/AppVersion-2.7.3-informational?style=flat-square) ![Version: 2.5.0](https://img.shields.io/badge/Version-2.5.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 2.6.0](https://img.shields.io/badge/AppVersion-2.6.0-informational?style=flat-square)
Jellyseerr helm chart for Kubernetes Jellyseerr helm chart for Kubernetes
@@ -20,17 +20,6 @@ Jellyseerr helm chart for Kubernetes
Kubernetes: `>=1.23.0-0` Kubernetes: `>=1.23.0-0`
## Update Notes
### Updating to 2.7.0
Jellyseerr is a stateful application and it is not designed to have multiple replicas. In version 2.7.0 we address this by:
- replacing `Deployment` with `StatefulSet`
- removing `replicaCount` value
If the `replicaCount` value was used, remove it; the Helm upgrade should work fine after that.
## Values ## Values
| Key | Type | Default | Description | | Key | Type | Default | Description |
@@ -66,6 +55,7 @@ If `replicaCount` value was used - remove it. Helm update should work fine after
| probes.livenessProbe | object | `{}` | Configure liveness probe | | probes.livenessProbe | object | `{}` | Configure liveness probe |
| probes.readinessProbe | object | `{}` | Configure readiness probe | | probes.readinessProbe | object | `{}` | Configure readiness probe |
| probes.startupProbe | string | `nil` | Configure startup probe | | probes.startupProbe | string | `nil` | Configure startup probe |
| replicaCount | int | `1` | |
| resources | object | `{}` | | | resources | object | `{}` | |
| securityContext | object | `{}` | | | securityContext | object | `{}` | |
| service.port | int | `80` | | | service.port | int | `80` | |
@@ -74,6 +64,7 @@ If `replicaCount` value was used - remove it. Helm update should work fine after
| serviceAccount.automount | bool | `true` | Automatically mount a ServiceAccount's API credentials? | | serviceAccount.automount | bool | `true` | Automatically mount a ServiceAccount's API credentials? |
| serviceAccount.create | bool | `true` | Specifies whether a service account should be created | | serviceAccount.create | bool | `true` | Specifies whether a service account should be created |
| serviceAccount.name | string | `""` | If not set and create is true, a name is generated using the fullname template | | serviceAccount.name | string | `""` | If not set and create is true, a name is generated using the fullname template |
| strategy | object | `{"type":"Recreate"}` | Deployment strategy |
| tolerations | list | `[]` | | | tolerations | list | `[]` | |
| volumeMounts | list | `[]` | Additional volumeMounts on the output StatefulSet definition. | | volumeMounts | list | `[]` | Additional volumeMounts on the output Deployment definition. |
| volumes | list | `[]` | Additional volumes on the output StatefulSet definition. | | volumes | list | `[]` | Additional volumes on the output Deployment definition. |


@@ -14,15 +14,4 @@
{{ template "chart.requirementsSection" . }} {{ template "chart.requirementsSection" . }}
## Update Notes
### Updating to 2.7.0
Jellyseerr is a stateful application and it is not designed to have multiple replicas. In version 2.7.0 we address this by:
- replacing `Deployment` with `StatefulSet`
- removing `replicaCount` value
If the `replicaCount` value was used, remove it; the Helm upgrade should work fine after that.
{{ template "chart.valuesSection" . }} {{ template "chart.valuesSection" . }}


@@ -1,11 +1,13 @@
apiVersion: apps/v1 apiVersion: apps/v1
kind: StatefulSet kind: Deployment
metadata: metadata:
name: {{ include "jellyseerr.fullname" . }} name: {{ include "jellyseerr.fullname" . }}
labels: labels:
{{- include "jellyseerr.labels" . | nindent 4 }} {{- include "jellyseerr.labels" . | nindent 4 }}
spec: spec:
serviceName: {{ include "jellyseerr.fullname" . }} replicas: {{ .Values.replicaCount }}
strategy:
type: {{ .Values.strategy.type }}
selector: selector:
matchLabels: matchLabels:
{{- include "jellyseerr.selectorLabels" . | nindent 6 }} {{- include "jellyseerr.selectorLabels" . | nindent 6 }}


@@ -1,3 +1,5 @@
replicaCount: 1
image: image:
registry: ghcr.io registry: ghcr.io
repository: fallenbagel/jellyseerr repository: fallenbagel/jellyseerr
@@ -10,6 +12,10 @@ imagePullSecrets: []
nameOverride: "" nameOverride: ""
fullnameOverride: "" fullnameOverride: ""
# -- Deployment strategy
strategy:
type: Recreate
# Liveness / Readiness / Startup Probes # Liveness / Readiness / Startup Probes
probes: probes:
# -- Configure liveness probe # -- Configure liveness probe
@@ -109,14 +115,14 @@ resources: {}
# cpu: 100m # cpu: 100m
# memory: 128Mi # memory: 128Mi
# -- Additional volumes on the output StatefulSet definition. # -- Additional volumes on the output Deployment definition.
volumes: [] volumes: []
# - name: foo # - name: foo
# secret: # secret:
# secretName: mysecret # secretName: mysecret
# optional: false # optional: false
# -- Additional volumeMounts on the output StatefulSet definition. # -- Additional volumeMounts on the output Deployment definition.
volumeMounts: [] volumeMounts: []
# - name: foo # - name: foo
# mountPath: "/etc/foo" # mountPath: "/etc/foo"


@@ -6,6 +6,7 @@
"apiKey": "testkey", "apiKey": "testkey",
"applicationTitle": "Jellyseerr", "applicationTitle": "Jellyseerr",
"applicationUrl": "", "applicationUrl": "",
"csrfProtection": false,
"cacheImages": false, "cacheImages": false,
"defaultPermissions": 32, "defaultPermissions": 32,
"defaultQuotas": { "defaultQuotas": {
@@ -82,6 +83,13 @@
"enableMentions": true "enableMentions": true
} }
}, },
"lunasea": {
"enabled": false,
"types": 0,
"options": {
"webhookUrl": ""
}
},
"slack": { "slack": {
"enabled": false, "enabled": false,
"types": 0, "types": 0,
@@ -179,26 +187,5 @@
"image-cache-cleanup": { "image-cache-cleanup": {
"schedule": "0 0 5 * * *" "schedule": "0 0 5 * * *"
} }
},
"network": {
"csrfProtection": false,
"trustProxy": false,
"forceIpv4First": false,
"dnsServers": "",
"proxy": {
"enabled": false,
"hostname": "",
"port": 8080,
"useSsl": false,
"user": "",
"password": "",
"bypassFilter": "",
"bypassLocalAddresses": true
},
"dnsCache": {
"enabled": false,
"forceMinTtl": 0,
"forceMaxTtl": -1
}
} }
} }


@@ -1,148 +0,0 @@
describe('TVDB Integration', () => {
// Constants for routes and selectors
const ROUTES = {
home: '/',
metadataSettings: '/settings/metadata',
tomorrowIsOursTvShow: '/tv/72879',
monsterTvShow: '/tv/225634',
dragonnBallZKaiAnime: '/tv/61709',
};
const SELECTORS = {
sidebarToggle: '[data-testid=sidebar-toggle]',
sidebarSettingsMobile: '[data-testid=sidebar-menu-settings-mobile]',
settingsNavDesktop: 'nav[data-testid="settings-nav-desktop"]',
metadataTestButton: 'button[type="button"]:contains("Test")',
metadataSaveButton: '[data-testid="metadata-save-button"]',
tmdbStatus: '[data-testid="tmdb-status"]',
tvdbStatus: '[data-testid="tvdb-status"]',
tvMetadataProviderSelector: '[data-testid="tv-metadata-provider-selector"]',
animeMetadataProviderSelector:
'[data-testid="anime-metadata-provider-selector"]',
seasonSelector: '[data-testid="season-selector"]',
season1: 'Season 1',
season2: 'Season 2',
season3: 'Season 3',
episodeList: '[data-testid="episode-list"]',
episode9: '9 - Hang Men',
};
// Reusable commands
const navigateToMetadataSettings = () => {
cy.visit(ROUTES.home);
cy.get(SELECTORS.sidebarToggle).click();
cy.get(SELECTORS.sidebarSettingsMobile).click();
cy.get(
`${SELECTORS.settingsNavDesktop} a[href="${ROUTES.metadataSettings}"]`
).click();
};
const testAndVerifyMetadataConnection = () => {
cy.intercept('POST', '/api/v1/settings/metadatas/test').as(
'testConnection'
);
cy.get(SELECTORS.metadataTestButton).click();
return cy.wait('@testConnection');
};
const saveMetadataSettings = (customBody = null) => {
if (customBody) {
cy.intercept('PUT', '/api/v1/settings/metadatas', (req) => {
req.body = customBody;
}).as('saveMetadata');
} else {
// Else just intercept without modifying body
cy.intercept('PUT', '/api/v1/settings/metadatas').as('saveMetadata');
}
cy.get(SELECTORS.metadataSaveButton).click();
return cy.wait('@saveMetadata');
};
beforeEach(() => {
// Perform login
cy.login(Cypress.env('ADMIN_EMAIL'), Cypress.env('ADMIN_PASSWORD'));
// Navigate to Metadata settings
navigateToMetadataSettings();
// Verify we're on the correct settings page
cy.contains('h3', 'Metadata Providers').should('be.visible');
// Configure TVDB as TV provider and test connection
cy.get(SELECTORS.tvMetadataProviderSelector).click();
// get id react-select-4-option-1
cy.get('[class*="react-select__option"]').contains('TheTVDB').click();
// Test the connection
testAndVerifyMetadataConnection().then(({ response }) => {
expect(response.statusCode).to.equal(200);
// Check TVDB connection status
cy.get(SELECTORS.tvdbStatus).should('contain', 'Operational');
});
// Save settings
saveMetadataSettings({
anime: 'tvdb',
tv: 'tvdb',
}).then(({ response }) => {
expect(response.statusCode).to.equal(200);
expect(response.body.tv).to.equal('tvdb');
});
});
it('should display "Tomorrow is Ours" show information with multiple seasons from TVDB', () => {
// Navigate to the TV show
cy.visit(ROUTES.tomorrowIsOursTvShow);
// Verify that multiple seasons are displayed (TMDB has only 1 season, TVDB has multiple)
// cy.get(SELECTORS.seasonSelector).should('exist');
cy.intercept('/api/v1/tv/225634/season/1').as('season1');
// Select Season 2 and verify it loads
cy.contains(SELECTORS.season2)
.should('be.visible')
.scrollIntoView()
.click();
// Verify that episodes are displayed for Season 2
cy.contains('260 - Episode 506').should('be.visible');
});
it('Should display "Monster" show information correctly when not existing on TVDB', () => {
// Navigate to the TV show
cy.visit(ROUTES.monsterTvShow);
// Intercept season 1 request
cy.intercept('/api/v1/tv/225634/season/1').as('season1');
// Select Season 1
cy.contains(SELECTORS.season1)
.should('be.visible')
.scrollIntoView()
.click();
// Wait for the season data to load
cy.wait('@season1');
// Verify specific episode exists
cy.contains(SELECTORS.episode9).should('be.visible');
});
it('should display "Dragon Ball Z Kai" show information with multiple only 2 seasons from TVDB', () => {
// Navigate to the TV show
cy.visit(ROUTES.dragonnBallZKaiAnime);
// Intercept season 1 request
cy.intercept('/api/v1/tv/61709/season/1').as('season1');
// Select Season 2 and verify it is visible
cy.contains(SELECTORS.season2)
.should('be.visible')
.scrollIntoView()
.click();
// Select Season 3 and verify it does not exist
cy.contains(SELECTORS.season3).should('not.exist');
});
});


@@ -36,7 +36,7 @@ describe('User List', () => {
cy.get('#email').type(testUser.emailAddress); cy.get('#email').type(testUser.emailAddress);
cy.get('#password').type(testUser.password); cy.get('#password').type(testUser.password);
cy.intercept('/api/v1/user?take=10&skip=0&sort=displayname').as('user'); cy.intercept('/api/v1/user*').as('user');
cy.get('[data-testid=modal-ok-button]').click(); cy.get('[data-testid=modal-ok-button]').click();
@@ -56,7 +56,7 @@ describe('User List', () => {
cy.get('[data-testid=modal-title]').should('contain', `Delete User`); cy.get('[data-testid=modal-title]').should('contain', `Delete User`);
cy.intercept('/api/v1/user?take=10&skip=0&sort=displayname').as('user'); cy.intercept('/api/v1/user*').as('user');
cy.get('[data-testid=modal-ok-button]').should('contain', 'Delete').click(); cy.get('[data-testid=modal-ok-button]').should('contain', 'Delete').click();


@@ -12,7 +12,7 @@ Jellyseerr supports SQLite and PostgreSQL. The database connection can be config
If you want to use SQLite, you can simply set the `DB_TYPE` environment variable to `sqlite`. This is the default configuration so even if you don't set any other options, SQLite will be used. If you want to use SQLite, you can simply set the `DB_TYPE` environment variable to `sqlite`. This is the default configuration so even if you don't set any other options, SQLite will be used.
```dotenv ```dotenv
DB_TYPE=sqlite # Which DB engine to use, either sqlite or postgres. The default is sqlite. DB_TYPE="sqlite" # Which DB engine to use, either "sqlite" or "postgres". The default is "sqlite".
CONFIG_DIRECTORY="config" # (optional) The path to the config directory where the db file is stored. The default is "config". CONFIG_DIRECTORY="config" # (optional) The path to the config directory where the db file is stored. The default is "config".
DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging. The default is "false". DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging. The default is "false".
``` ```
@@ -24,7 +24,7 @@ DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging.
If your PostgreSQL server is configured to accept TCP connections, you can specify the host and port using the `DB_HOST` and `DB_PORT` environment variables. This is useful for remote connections where the server uses a network host and port. If your PostgreSQL server is configured to accept TCP connections, you can specify the host and port using the `DB_HOST` and `DB_PORT` environment variables. This is useful for remote connections where the server uses a network host and port.
```dotenv ```dotenv
DB_TYPE=postgres # Which DB engine to use, either sqlite or postgres. The default is sqlite. DB_TYPE="postgres" # Which DB engine to use, either "sqlite" or "postgres". The default is "sqlite".
DB_HOST="localhost" # (optional) The host (URL) of the database. The default is "localhost". DB_HOST="localhost" # (optional) The host (URL) of the database. The default is "localhost".
DB_PORT="5432" # (optional) The port to connect to. The default is "5432". DB_PORT="5432" # (optional) The port to connect to. The default is "5432".
DB_USER= # (required) Username used to connect to the database. DB_USER= # (required) Username used to connect to the database.
@@ -38,7 +38,7 @@ DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging.
If your PostgreSQL server is configured to accept Unix socket connections, you can specify the path to the socket directory using the `DB_SOCKET_PATH` environment variable. This is useful for local connections where the server uses a Unix socket. If your PostgreSQL server is configured to accept Unix socket connections, you can specify the path to the socket directory using the `DB_SOCKET_PATH` environment variable. This is useful for local connections where the server uses a Unix socket.
```dotenv ```dotenv
DB_TYPE=postgres # Which DB engine to use, either sqlite or postgres. The default is sqlite. DB_TYPE="postgres" # Which DB engine to use, either "sqlite" or "postgres". The default is "sqlite".
DB_SOCKET_PATH="/var/run/postgresql" # (required) The path to the PostgreSQL Unix socket directory. DB_SOCKET_PATH="/var/run/postgresql" # (required) The path to the PostgreSQL Unix socket directory.
DB_USER= # (required) Username used to connect to the database. DB_USER= # (required) Username used to connect to the database.
DB_PASS= # (optional) Password of the user used to connect to the database, depending on the server's authentication configuration. DB_PASS= # (optional) Password of the user used to connect to the database, depending on the server's authentication configuration.
@@ -46,27 +46,6 @@ DB_NAME="jellyseerr" # (optional) The name of the database to connect to. The de
DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging. The default is "false". DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging. The default is "false".
``` ```
:::info
**Finding Your PostgreSQL Socket Path**
The PostgreSQL socket path varies by operating system and installation method:
- **Ubuntu/Debian**: `/var/run/postgresql`
- **CentOS/RHEL/Fedora**: `/var/run/postgresql`
- **macOS (Homebrew)**: `/tmp` or `/opt/homebrew/var/postgresql`
- **macOS (Postgres.app)**: `/tmp`
- **Windows**: Not applicable (uses TCP connections)
You can find your socket path by running:
```bash
# Find PostgreSQL socket directory
find /tmp /var/run /run -name ".s.PGSQL.*" 2>/dev/null | head -1 | xargs dirname
# Or check PostgreSQL configuration
sudo -u postgres psql -c "SHOW unix_socket_directories;"
```
:::
### SSL configuration ### SSL configuration
The following options can be used to further configure ssl. Certificates can be provided as a string or a file path, with the string version taking precedence. The following options can be used to further configure ssl. Certificates can be provided as a string or a file path, with the string version taking precedence.
@@ -77,11 +56,10 @@ DB_SSL_REJECT_UNAUTHORIZED="true" # (optional) Whether to reject ssl connections
DB_SSL_CA= # (optional) The CA certificate to verify the connection, provided as a string. The default is "". DB_SSL_CA= # (optional) The CA certificate to verify the connection, provided as a string. The default is "".
DB_SSL_CA_FILE= # (optional) The path to a CA certificate to verify the connection. The default is "". DB_SSL_CA_FILE= # (optional) The path to a CA certificate to verify the connection. The default is "".
DB_SSL_KEY= # (optional) The private key for the connection in PEM format, provided as a string. The default is "". DB_SSL_KEY= # (optional) The private key for the connection in PEM format, provided as a string. The default is "".
DB_SSL_KEY_FILE= # (optional) Path to the private key for the connection in PEM format. The default is "". DB_SSL_KEY_FILE= # (optinal) Path to the private key for the connection in PEM format. The default is "".
DB_SSL_CERT= # (optional) Certificate chain in pem format for the private key, provided as a string. The default is "". DB_SSL_CERT= # (optional) Certificate chain in pem format for the private key, provided as a string. The default is "".
DB_SSL_CERT_FILE= # (optional) Path to certificate chain in pem format for the private key. The default is "". DB_SSL_CERT_FILE= # (optional) Path to certificate chain in pem format for the private key. The default is "".
``` ```
--- ---
### Migrating from SQLite to PostgreSQL ### Migrating from SQLite to PostgreSQL
@@ -90,76 +68,15 @@ DB_SSL_CERT_FILE= # (optional) Path to certificate chain in pem format for the p
2. Run Jellyseerr to create the tables in the PostgreSQL database 2. Run Jellyseerr to create the tables in the PostgreSQL database
3. Stop Jellyseerr 3. Stop Jellyseerr
4. Run the following command to export the data from the SQLite database and import it into the PostgreSQL database: 4. Run the following command to export the data from the SQLite database and import it into the PostgreSQL database:
:::info :::info
Edit the postgres connection string (without the \{\{ and \}\} brackets) to match your setup. Edit the postgres connection string to match your setup.
If you don't have or don't want to use docker, you can build the working pgloader version [in this PR](https://github.com/dimitri/pgloader/pull/1531) from source and use the same options as below. If you don't have or don't want to use docker, you can build the working pgloader version [in this PR](https://github.com/dimitri/pgloader/pull/1531) from source and use the same options as below.
::: :::
:::caution :::caution
The most recent release of pgloader has an issue quoting the table columns. Use the version in the docker container to avoid this issue. The most recent release of pgloader has an issue quoting the table columns. Use the version in the docker container to avoid this issue.
::: :::
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
<Tabs>
<TabItem value="docker" label="Using pgloader Container (Recommended)" default>
**Recommended method**: Use the pgloader container even for standalone Jellyseerr installations. This avoids building from source and ensures compatibility.
```bash ```bash
# For standalone installations (no Docker network needed) docker run --rm -v config/db.sqlite3:/db.sqlite3:ro ghcr.io/ralgar/pgloader:pr-1531 pgloader --with "quote identifiers" --with "data only" /db.sqlite3 postgresql://{{DB_USER}}:{{DB_PASS}}@{{DB_HOST}}:{{DB_PORT}}/{{DB_NAME}}
docker run --rm \ ```
-v /path/to/your/config/db.sqlite3:/db.sqlite3:ro \
ghcr.io/ralgar/pgloader:pr-1531 \
pgloader --with "quote identifiers" --with "data only" \
/db.sqlite3 postgresql://{{DB_USER}}:{{DB_PASS}}@{{DB_HOST}}:{{DB_PORT}}/{{DB_NAME}}
```
**For Docker Compose setups**: Add the network parameter if your PostgreSQL is also in a container:
```bash
docker run --rm \
--network your-jellyseerr-network \
-v /path/to/your/config/db.sqlite3:/db.sqlite3:ro \
ghcr.io/ralgar/pgloader:pr-1531 \
pgloader --with "quote identifiers" --with "data only" \
/db.sqlite3 postgresql://{{DB_USER}}:{{DB_PASS}}@{{DB_HOST}}:{{DB_PORT}}/{{DB_NAME}}
```
</TabItem>
<TabItem value="standalone" label="Building pgloader from Source">
For users who prefer not to use Docker or need a custom build:
```bash
# Clone the repository and checkout the working version
git clone https://github.com/dimitri/pgloader.git
cd pgloader
git fetch origin pull/1531/head:pr-1531
git checkout pr-1531
# Follow the official installation instructions
# See: https://github.com/dimitri/pgloader/blob/master/INSTALL.md
```
:::info
**Building pgloader from source requires following the complete installation process outlined in the [official pgloader INSTALL.md](https://github.com/dimitri/pgloader/blob/master/INSTALL.md).**
Please refer to the official documentation for detailed, up-to-date installation instructions.
:::
Once pgloader is built, run the migration:
```bash
# Run migration (adjust path to your config directory)
./pgloader --with "quote identifiers" --with "data only" \
/path/to/your/config/db.sqlite3 \
postgresql://{{DB_USER}}:{{DB_PASS}}@{{DB_HOST}}:{{DB_PORT}}/{{DB_NAME}}
```
</TabItem>
</Tabs>
5. Start Jellyseerr 5. Start Jellyseerr


@@ -207,62 +207,3 @@ labels:
``` ```
For more information, please refer to the [Traefik documentation](https://doc.traefik.io/traefik/user-guides/docker-compose/basic-example/). For more information, please refer to the [Traefik documentation](https://doc.traefik.io/traefik/user-guides/docker-compose/basic-example/).
## Apache2 HTTP Server
<Tabs groupId="apache2-reverse-proxy" queryString>
<TabItem value="subdomain" label="Subdomain">
Add the following Location block to your existing Server configuration.
```apache
# Jellyseerr
ProxyPreserveHost On
ProxyPass / http://localhost:5055 retry=0 connectiontimeout=5 timeout=30 keepalive=on
ProxyPassReverse http://localhost:5055 /
RequestHeader set Connection ""
```
</TabItem>
<TabItem value="subfolder" label="Subfolder">
:::warning
This Apache2 subfolder reverse proxy is an unsupported workaround, and only provided as an example. The filters may stop working when Jellyseerr is updated.
If you encounter any issues with Jellyseerr while using this workaround, we may ask you to try to reproduce the problem without the Apache2 proxy.
:::
Add the following Location block to your existing Server configuration.
```apache
# Jellyseerr
# We will use "/jellyseerr" as subfolder
# You can replace it with any that you like
<Location /jellyseerr>
ProxyPreserveHost On
ProxyPass http://localhost:5055 retry=0 connectiontimeout=5 timeout=30 keepalive=on
ProxyPassReverse http://localhost:5055
RequestHeader set Connection ""
# Header update, to support subfolder
# Please Replace "FQDN" with your domain
Header edit location ^/login https://FQDN/jellyseerr/login
Header edit location ^/setup https://FQDN/jellyseerr/setup
AddOutputFilterByType INFLATE;SUBSTITUTE text/html application/javascript application/json
SubstituteMaxLineLength 2000K
# This is HTML and JS update
# Please update "/jellyseerr" if needed
Substitute "s|href=\"|href=\"/jellyseerr|inq"
Substitute "s|src=\"|src=\"/jellyseerr|inq"
Substitute "s|/api/|/jellyseerr/api/|inq"
Substitute "s|\"/_next/|\"/jellyseerr/_next/|inq"
# This is JSON update
Substitute "s|\"/avatarproxy/|\"/jellyseerr/avatarproxy/|inq"
</Location>
```
</TabItem>
</Tabs>


@@ -15,7 +15,7 @@ import TabItem from '@theme/TabItem';
### Prerequisites ### Prerequisites
- [Node.js 22.x](https://nodejs.org/en/download/) - [Node.js 22.x](https://nodejs.org/en/download/)
- [Pnpm 10.x](https://pnpm.io/installation) - [Pnpm 9.x](https://pnpm.io/installation)
- [Git](https://git-scm.com/downloads) - [Git](https://git-scm.com/downloads)
## Unix (Linux, macOS) ## Unix (Linux, macOS)


@@ -33,31 +33,20 @@ docker run -d \
--name jellyseerr \ --name jellyseerr \
-e LOG_LEVEL=debug \ -e LOG_LEVEL=debug \
-e TZ=Asia/Tashkent \ -e TZ=Asia/Tashkent \
-e PORT=5055 \ -e PORT=5055 `#optional` \
-p 5055:5055 \ -p 5055:5055 \
-v /path/to/appdata/config:/app/config \ -v /path/to/appdata/config:/app/config \
--restart unless-stopped \ --restart unless-stopped \
fallenbagel/jellyseerr fallenbagel/jellyseerr
``` ```
The argument `-e PORT=5055` is optional.
If you want to add a healthcheck to the above command, you can add the following flags:
```
--health-cmd "wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1" \
--health-start-period 20s \
--health-timeout 3s \
--health-interval 15s \
--health-retries 3 \
```
To run the container as a specific user/group, you may optionally add `--user=[ user | user:group | uid | uid:gid | user:gid | uid:group ]` to the above command. To run the container as a specific user/group, you may optionally add `--user=[ user | user:group | uid | uid:gid | user:gid | uid:group ]` to the above command.
#### Updating: #### Updating:
Stop and remove the existing container: Stop and remove the existing container:
```bash ```bash
docker stop jellyseerr && docker rm jellyseerr docker stop jellyseerr && docker rm Jellyseerr
``` ```
Pull the latest image: Pull the latest image:
```bash ```bash
@@ -94,12 +83,6 @@ services:
- 5055:5055 - 5055:5055
volumes: volumes:
- /path/to/appdata/config:/app/config - /path/to/appdata/config:/app/config
healthcheck:
test: wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1
start_period: 20s
timeout: 3s
interval: 15s
retries: 3
restart: unless-stopped restart: unless-stopped
``` ```
@@ -154,26 +137,7 @@ Then, create and start the Jellyseerr container:
<Tabs groupId="docker-methods" queryString> <Tabs groupId="docker-methods" queryString>
<TabItem value="docker-cli" label="Docker CLI"> <TabItem value="docker-cli" label="Docker CLI">
```bash ```bash
docker run -d \ docker run -d --name jellyseerr -e LOG_LEVEL=debug -e TZ=Asia/Tashkent -p 5055:5055 -v "jellyseerr-data:/app/config" --restart unless-stopped fallenbagel/jellyseerr:latest
--name jellyseerr \
-e LOG_LEVEL=debug \
-e TZ=Asia/Tashkent \
-e PORT=5055 \
-p 5055:5055 \
-v jellyseerr-data:/app/config \
--restart unless-stopped \
fallenbagel/jellyseerr
```
The argument `-e PORT=5055` is optional.
If you want to add a healthcheck to the above command, you can add the following flags:
```
--health-cmd "wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1" \
--health-start-period 20s \
--health-timeout 3s \
--health-interval 15s \
--health-retries 3 \
``` ```
#### Updating: #### Updating:
@@ -201,12 +165,6 @@ services:
- 5055:5055 - 5055:5055
volumes: volumes:
- jellyseerr-data:/app/config - jellyseerr-data:/app/config
healthcheck:
test: wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1
start_period: 20s
timeout: 3s
interval: 15s
retries: 3
restart: unless-stopped restart: unless-stopped
volumes: volumes:


@@ -1,5 +1,5 @@
--- ---
title: Kubernetes (Advanced) title: Kubernetes
description: Install Jellyseerr in Kubernetes description: Install Jellyseerr in Kubernetes
sidebar_position: 5 sidebar_position: 5
--- ---


@@ -105,12 +105,6 @@ In some places (like China), the ISP blocks not only the DNS resolution but also
You can configure Jellyseerr to use a proxy with the [HTTP(S) Proxy](/using-jellyseerr/settings/general#https-proxy) setting. You can configure Jellyseerr to use a proxy with the [HTTP(S) Proxy](/using-jellyseerr/settings/general#https-proxy) setting.
### Option 3: Force IPv4 resolution first
Sometimes configuration issues with IPv6 prevent hostname resolution from working correctly.
You can try forcing IPv4 resolution first by going to `Settings > Networking > Advanced Networking`, enabling the `Force IPv4 Resolution First` setting, and restarting Jellyseerr.
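For reference, this toggle corresponds to the `forceIpv4First` flag in the `network` section of `settings.json` (visible in the settings diff earlier in this comparison); a minimal sketch with the option enabled, other keys omitted:
```json
"network": {
  "forceIpv4First": true
}
```
Editing the file directly is normally unnecessary; the UI setting should write the same key.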
### Option 4: Check that your server can reach TMDB API ### Option 4: Check that your server can reach TMDB API
Make sure that your server can reach the TMDB API by running the following command: Make sure that your server can reach the TMDB API by running the following command:


@@ -1,21 +0,0 @@
---
title: Gotify
description: Configure Gotify notifications.
sidebar_position: 5
---
# Gotify
## Configuration
### Server URL
Set this to the URL of your Gotify server.
### Application Token
Add an application to your Gotify server, and set this field to the generated application token.
:::info
Please refer to the [Gotify API documentation](https://gotify.net/docs) for more details on configuring these notifications.
:::


@@ -1,29 +0,0 @@
---
title: ntfy.sh
description: Configure ntfy.sh notifications.
sidebar_position: 6
---
# ntfy.sh
## Configuration
### Server Root URL
Set this to the URL of your ntfy.sh server.
### Topic
Set this to the topic you want to send notifications to.
### Username + Password authentication (optional)
Set this to the username and password for your ntfy.sh server.
### Token authentication (optional)
Set this to the token for your ntfy.sh server.
:::info
Please refer to the [ntfy.sh API documentation](https://docs.ntfy.sh/) for more details on configuring these notifications.
:::


@@ -1,23 +0,0 @@
---
title: Pushbullet
description: Configure Pushbullet notifications.
sidebar_position: 7
---
# Pushbullet
:::info
Users can optionally configure personal notifications in their user settings.
User notifications are separate from system notifications, and the available notification types are dependent on user permissions.
:::
## Configuration
### Access Token
[Create an access token](https://www.pushbullet.com/#settings) and set it here to grant Jellyseerr access to the Pushbullet API.
### Channel Tag (optional)
Optionally, [create a channel](https://www.pushbullet.com/my-channel) to allow other users to follow the notification feed using the specified channel tag.


@@ -1,27 +0,0 @@
---
title: Pushover
description: Configure Pushover notifications.
sidebar_position: 8
---
# Pushover
:::info
Users can optionally configure personal notifications in their user settings.
User notifications are separate from system notifications, and the available notification types are dependent on user permissions.
:::
## Configuration
### Application/API Token
[Register an application](https://pushover.net/apps/build) and enter the API token in this field. (You can use one of the [official icons in our GitHub repository](https://github.com/fallenbagel/jellyseerr/tree/develop/public) when configuring the application.)
For more details on registering applications or the API token, please see the [Pushover API documentation](https://pushover.net/api#registration).
### User Key
Set this to the user key for your Pushover account. Alternatively, you can set this to a group key to deliver notifications to multiple users.
For more details, please see the [Pushover API documentation](https://pushover.net/api#identifiers).


@@ -1,17 +0,0 @@
---
title: Slack
description: Configure Slack notifications.
sidebar_position: 9
---
# Slack
## Configuration
### Webhook URL
Simply [create a webhook](https://my.slack.com/services/new/incoming-webhook/) and enter the URL in this field.
:::info
Please refer to the [Slack API documentation](https://api.slack.com/messaging/webhooks) for more details on configuring these notifications.
:::


@@ -1,39 +0,0 @@
---
title: Telegram
description: Configure Telegram notifications.
sidebar_position: 10
---
# Telegram
:::info
Users can optionally configure personal notifications in their user settings.
User notifications are separate from system notifications, and the available notification types are dependent on user permissions.
:::
## Configuration
:::info
In order to configure Telegram notifications, you first need to [create a bot](https://telegram.me/BotFather).
Bots **cannot** initiate conversations with users, so users must have your bot added to a conversation in order to receive notifications.
:::
### Bot Username (optional)
If this value is configured, users will be able to click a link to start a chat with your bot and configure their own personal notifications.
The bot username should end with `_bot`, and the `@` prefix should be omitted.
### Bot Authentication Token
At the end of the bot creation process, [@BotFather](https://telegram.me/botfather) will provide an authentication token.
### Chat ID
To obtain your chat ID, simply create a new group chat, add [@get_id_bot](https://telegram.me/get_id_bot), and issue the `/my_id` command.
### Send Silently (optional)
Optionally, notifications can be sent silently. Silent notifications send messages without notification sounds.


@@ -1,138 +0,0 @@
---
title: Webhook
description: Configure webhook notifications.
sidebar_position: 4
---
# Webhook
The webhook notification agent enables you to send a custom JSON payload to any endpoint for specific notification events.
## Configuration
### Webhook URL
The URL you would like to post notifications to. Your JSON will be sent as the body of the request.
### Authorization Header (optional)
:::info
This is typically not needed. Please refer to your webhook provider's documentation for details.
:::
This value will be sent as an `Authorization` HTTP header.
### JSON Payload
Customize the JSON payload to suit your needs. Jellyseerr provides several [template variables](#template-variables) for use in the payload, which will be replaced with the relevant data when the notifications are triggered.
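As a starting point, here is a minimal payload sketch that uses only the general variables and the special `{{extra}}` key documented below; the field names on the left are arbitrary and can be whatever your receiving endpoint expects:
```json
{
  "notification_type": "{{notification_type}}",
  "event": "{{event}}",
  "subject": "{{subject}}",
  "message": "{{message}}",
  "image": "{{image}}",
  "{{extra}}": []
}
```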
## Template Variables
### General
| Variable | Value |
| ----------------------- | ----------------------------------------------------------------------------------------------------------------------------------- |
| `{{notification_type}}` | The type of notification (e.g. `MEDIA_PENDING` or `ISSUE_COMMENT`) |
| `{{event}}` | A friendly description of the notification event |
| `{{subject}}` | The notification subject (typically the media title) |
| `{{message}}`           | The notification message body (the media overview/synopsis for request notifications; the issue description for issue notifications) |
| `{{image}}` | The notification image (typically the media poster) |
### Notify User
These variables are for the target recipient of the notification.
| Variable | Value |
| ---------------------------------------- | ------------------------------------------------------------- |
| `{{notifyuser_username}}` | The target notification recipient's username |
| `{{notifyuser_email}}` | The target notification recipient's email address |
| `{{notifyuser_avatar}}` | The target notification recipient's avatar URL |
| `{{notifyuser_settings_discordId}}` | The target notification recipient's Discord ID (if set) |
| `{{notifyuser_settings_telegramChatId}}` | The target notification recipient's Telegram Chat ID (if set) |
:::info
The `notifyuser` variables are not defined for the following request notification types, as they are intended for application administrators rather than end users:
- Request Pending Approval
- Request Automatically Approved
- Request Processing Failed
On the other hand, the `notifyuser` variables _will_ be replaced with the requesting user's information for the below notification types:
- Request Approved
- Request Declined
- Request Available
If you would like to use the requesting user's information in your webhook, please instead include the relevant variables from the [Request](#request) section below.
:::
### Special
The following variables must be used as a key in the JSON payload (e.g., `"{{extra}}": []`).
| Variable | Value |
| ------------- | ------------------------------------------------------------------------------------------------------------------------------ |
| `{{media}}` | The relevant media object |
| `{{request}}` | The relevant request object |
| `{{issue}}` | The relevant issue object |
| `{{comment}}` | The relevant issue comment object |
| `{{extra}}` | The "extra" array of additional data for certain notifications (e.g., season/episode numbers for series-related notifications) |
#### Media
The `{{media}}` will be `null` if there is no relevant media object for the notification.
The following special variables are only included in media-related notifications, such as requests.
| Variable | Value |
| -------------------- | -------------------------------------------------------------------------------------------------------------- |
| `{{media_type}}` | The media type (`movie` or `tv`) |
| `{{media_tmdbid}}` | The media's TMDB ID |
| `{{media_tvdbid}}` | The media's TheTVDB ID |
| `{{media_status}}` | The media's availability status (`UNKNOWN`, `PENDING`, `PROCESSING`, `PARTIALLY_AVAILABLE`, or `AVAILABLE`) |
| `{{media_status4k}}` | The media's 4K availability status (`UNKNOWN`, `PENDING`, `PROCESSING`, `PARTIALLY_AVAILABLE`, or `AVAILABLE`) |
#### Request
The `{{request}}` will be `null` if there is no relevant request object for the notification.
The following special variables are only included in request-related notifications.
| Variable | Value |
| ----------------------------------------- | ----------------------------------------------- |
| `{{request_id}}` | The request ID |
| `{{requestedBy_username}}` | The requesting user's username |
| `{{requestedBy_email}}` | The requesting user's email address |
| `{{requestedBy_avatar}}` | The requesting user's avatar URL |
| `{{requestedBy_settings_discordId}}` | The requesting user's Discord ID (if set) |
| `{{requestedBy_settings_telegramChatId}}` | The requesting user's Telegram Chat ID (if set) |
#### Issue
The `{{issue}}` will be `null` if there is no relevant issue object for the notification.
The following special variables are only included in issue-related notifications.
| Variable | Value |
| ---------------------------------------- | ----------------------------------------------- |
| `{{issue_id}}` | The issue ID |
| `{{reportedBy_username}}`                | The reporting user's username                    |
| `{{reportedBy_email}}`                   | The reporting user's email address               |
| `{{reportedBy_avatar}}`                  | The reporting user's avatar URL                  |
| `{{reportedBy_settings_discordId}}`      | The reporting user's Discord ID (if set)         |
| `{{reportedBy_settings_telegramChatId}}` | The reporting user's Telegram Chat ID (if set)   |
#### Comment
The `{{comment}}` will be `null` if there is no relevant comment object for the notification.
The following special variables are only included in issue comment-related notifications.
| Variable | Value |
| ----------------------------------------- | ----------------------------------------------- |
| `{{comment_message}}` | The comment message |
| `{{commentedBy_username}}` | The commenting user's username |
| `{{commentedBy_email}}` | The commenting user's email address |
| `{{commentedBy_avatar}}` | The commenting user's avatar URL |
| `{{commentedBy_settings_discordId}}` | The commenting user's Discord ID (if set) |
| `{{commentedBy_settings_telegramChatId}}` | The commenting user's Telegram Chat ID (if set) |


@@ -1,10 +0,0 @@
{
"label": "Plex Integration",
"position": 3,
"link": {
"type": "generated-index",
"title": "Plex Integration",
"description": "Learn about Jellyseerr's Plex integration features"
}
}


@@ -1,36 +0,0 @@
---
title: Overview
description: Learn about Jellyseerr's Plex integration features
sidebar_position: 1
---
# Plex Features Overview
Jellyseerr provides integration features that connect with your Plex media server to automate media management tasks.
## Available Features
- [Watchlist Auto Request](./plex/watchlist-auto-request) - Automatically request media from your Plex Watchlist
- More features coming soon!
## Prerequisites
:::info Authentication Required
To use any Plex integration features, you must have logged into Jellyseerr at least once with your Plex account.
:::
**Requirements:**
- Plex account with access to the configured Plex server
- Jellyseerr configured with Plex as the media server
- User authentication via Plex login
- Appropriate user permissions for specific features
## Getting Started
1. Authenticate at least once using your Plex credentials
2. Verify you have the necessary permissions for desired features
3. Follow individual feature guides for setup instructions
:::note Server Configuration
Plex server configuration is handled by your administrator. If you cannot log in with your Plex account, contact your administrator to verify the server setup.
:::


@@ -1,95 +0,0 @@
---
title: Watchlist Auto Request
description: Learn how to use the Plex Watchlist Auto Request feature
sidebar_position: 1
---
# Watchlist Auto Request
The Plex Watchlist Auto Request feature allows Jellyseerr to automatically create requests for media items you add to your Plex Watchlist. Simply add content to your Plex Watchlist, and Jellyseerr will automatically request it for you.
:::info
This feature is only available for Plex users. Local users cannot use the Watchlist Auto Request feature.
:::
## Prerequisites
- You must have logged into Jellyseerr at least once with your Plex account
- Your administrator must have granted you the necessary permissions
- Your Plex account must have access to the Plex server configured in Jellyseerr
## Permission System
The Watchlist Auto Request feature uses a two-tier permission system:
### Administrator Permissions (Required)
Your administrator must grant you these permissions in your user profile:
- **Auto-Request** (master permission)
- **Auto-Request Movies** (for movie auto-requests)
- **Auto-Request Series** (for TV series auto-requests)
### User Activation (Required)
You must enable the feature in your own profile settings:
- **Auto-Request Movies** toggle
- **Auto-Request Series** toggle
:::warning Two-Step Process
Both administrator permissions AND user activation are required. Having permissions doesn't automatically enable the feature - you must also activate it in your profile.
:::
## How to Enable
### Step 1: Check Your Permissions
Contact your administrator to verify you have been granted:
- `Auto-Request` permission
- `Auto-Request Movies` and/or `Auto-Request Series` permissions
### Step 2: Activate the Feature
1. Go to your user profile settings
2. Navigate to the "General" section
3. Find the "Auto-Request" options
4. Enable the toggles for:
- **Auto-Request Movies** - to automatically request movies from your watchlist
- **Auto-Request Series** - to automatically request TV series from your watchlist
### Step 3: Start Using
- Add movies and TV shows to your Plex Watchlist
- Jellyseerr will automatically create requests for new items
- You'll receive notifications when items are auto-requested
## How It Works
Once properly configured, Jellyseerr will:
1. Periodically check your Plex Watchlist for new items
2. Verify whether the content already exists in your media libraries
3. Automatically submit requests for new items that aren't already available
4. Only request content types you have permissions for
5. Notify you when auto-requests are created
:::info Content Limitations
Auto-request only works for standard quality content. 4K content must be requested manually if you have 4K permissions.
:::
## For Administrators
### Granting Permissions
1. Navigate to **Users** > **[Select User]** > **Permissions**
2. Enable the required permissions:
- **Auto-Request** (master toggle)
- **Auto-Request Movies** (for movie auto-requests)
- **Auto-Request Series** (for TV series auto-requests)
3. Optionally enable **Auto-Approve** permissions for automatic approval
### Default Permissions
- Go to **Settings** > **Users** > **Default Permissions**
- Configure auto-request permissions for new users
- This sets the default permissions but users still need to activate the feature individually
## Limitations
- Local users cannot use this feature
- 4K content requires manual requests
- Users must have logged into Jellyseerr with their Plex account
- Respects user request limits and quotas
- Won't request content already in your libraries


@@ -1,16 +0,0 @@
---
title: DNS Caching
description: Configure DNS caching settings.
sidebar_position: 7
---
# DNS Caching
Jellyseerr uses DNS caching to improve performance and reduce the number of DNS lookups required for external API calls. This can speed up response times and reduce load on DNS servers, particularly when something like a Pi-hole is used as the resolver.
## Configuration
You can enable the DNS caching settings in the Network tab of the Jellyseerr settings. The default values follow the standard DNS caching behavior.
- **Force Minimum TTL**: Set a minimum time-to-live (TTL) in seconds for DNS cache entries. This ensures that frequently accessed DNS records are cached for a longer period, reducing the need for repeated lookups. Default is 0.
- **Force Maximum TTL**: Set a maximum time-to-live (TTL) in seconds for DNS cache entries. This prevents infrequently accessed DNS records from being cached indefinitely, allowing for more up-to-date information to be retrieved. Default is -1 (unlimited).
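For reference, these two options map to the `dnsCache` block nested under `network` in `settings.json` (visible in the settings diff earlier in this comparison); the defaults look like this:
```json
"dnsCache": {
  "enabled": false,
  "forceMinTtl": 0,
  "forceMaxTtl": -1
}
```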


@@ -1,7 +1,6 @@
--- ---
title: Jobs & Cache title: Jobs & Cache
description: Configure jobs and cache settings. description: Configure jobs and cache settings.
sidebar_position: 6
--- ---
# Jobs & Cache # Jobs & Cache


@@ -1,24 +0,0 @@
---
title: Welcome to the Jellyseerr Blog
description: The official Jellyseerr blog for release notes, technical updates, and community news.
slug: welcome
authors: [fallenbagel, gauthier-th]
tags: [announcement, jellyseerr, blog]
image: https://raw.githubusercontent.com/fallenbagel/jellyseerr/refs/heads/develop/gen-docs/static/img/logo.svg
hide_table_of_contents: false
---
We are pleased to introduce the official Jellyseerr blog.
This space will serve as the central place for:
- Release announcements
- Updates on new features and improvements
- Technical articles, such as details on our [**DNS caching package**](https://github.com/jellyseerr/dns-caching) and other enhancements
- Community-related news
<!--truncate-->
Our goal is to keep the community informed and provide deeper insights into the ongoing development of Jellyseerr.
Thank you for being part of the Jellyseerr project. More updates will follow soon.


@@ -1,21 +0,0 @@
fallenbagel:
name: Fallenbagel
page: true
title: Developer & Maintainer of Jellyseerr
description: Core Maintainer & Developer of Jellyseerr | Full-Stack Software Engineer | MSc Software Engineering Candidate.
url: https://github.com/fallenbagel
image_url: https://github.com/fallenbagel.png
email: hello@fallenbagel.com
socials:
github: fallenbagel
gauthier-th:
name: Gauthier
page: true
title: Co-Developer & Co-Maintainer of Jellyseerr
description: Co-Maintainer & Developer of Jellyseerr | PhD Student in AI at ICB, Dijon
url: https://gauthierth.fr
image_url: https://github.com/gauthier-th.png
email: mail@gauthierth.fr
socials:
github: gauthier-th


@@ -34,6 +34,7 @@ const config: Config = {
editUrl: editUrl:
'https://github.com/fallenbagel/jellyseerr/edit/develop/docs/', 'https://github.com/fallenbagel/jellyseerr/edit/develop/docs/',
}, },
blog: false,
pages: false, pages: false,
theme: { theme: {
customCss: './src/css/custom.css', customCss: './src/css/custom.css',
@@ -68,11 +69,6 @@ const config: Config = {
src: 'img/logo.svg', src: 'img/logo.svg',
}, },
items: [ items: [
{
to: 'blog',
label: 'Blog',
position: 'right',
},
{ {
href: 'https://github.com/fallenbagel/jellyseerr', href: 'https://github.com/fallenbagel/jellyseerr',
label: 'GitHub', label: 'GitHub',
@@ -92,19 +88,6 @@ const config: Config = {
}, },
], ],
}, },
{
title: 'Project',
items: [
{
label: 'Blog',
to: '/blog',
},
{
label: 'GitHub',
href: 'https://github.com/fallenbagel/jellyseerr',
},
],
},
{ {
title: 'Community', title: 'Community',
items: [ items: [


@@ -2,7 +2,6 @@
"name": "gen-docs", "name": "gen-docs",
"version": "0.0.0", "version": "0.0.0",
"private": true, "private": true,
"packageManager": "pnpm@10.17.1",
"scripts": { "scripts": {
"docusaurus": "docusaurus", "docusaurus": "docusaurus",
"start": "docusaurus start", "start": "docusaurus start",
@@ -16,9 +15,9 @@
"typecheck": "tsc" "typecheck": "tsc"
}, },
"dependencies": { "dependencies": {
"@docusaurus/core": "3.9.1", "@docusaurus/core": "3.4.0",
"@docusaurus/preset-classic": "3.9.1", "@docusaurus/preset-classic": "3.4.0",
"@easyops-cn/docusaurus-search-local": "^0.52.1", "@easyops-cn/docusaurus-search-local": "^0.44.2",
"@mdx-js/react": "^3.0.0", "@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0", "clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0", "prism-react-renderer": "^2.3.0",
@@ -27,11 +26,14 @@
"tailwindcss": "^3.4.4" "tailwindcss": "^3.4.4"
}, },
"devDependencies": { "devDependencies": {
"@docusaurus/module-type-aliases": "3.9.1", "@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/tsconfig": "3.9.1", "@docusaurus/tsconfig": "3.4.0",
"@docusaurus/types": "3.9.1", "@docusaurus/types": "3.4.0",
"typescript": "~5.2.2" "typescript": "~5.2.2"
}, },
"resolutions": {
"prismjs": "PrismJS/prism"
},
"browserslist": { "browserslist": {
"production": [ "production": [
">0.5%", ">0.5%",

gen-docs/pnpm-lock.yaml (generated): file diff suppressed because it is too large


@@ -141,83 +141,14 @@ components:
UserSettings: UserSettings:
type: object type: object
properties: properties:
username:
type: string
nullable: true
example: 'Mr User'
email:
type: string
example: 'user@example.com'
discordId:
type: string
nullable: true
example: '123456789'
locale: locale:
type: string type: string
nullable: true
example: 'en'
discoverRegion: discoverRegion:
type: string type: string
nullable: true
example: 'US'
streamingRegion:
type: string
nullable: true
example: 'US'
originalLanguage: originalLanguage:
type: string type: string
nullable: true streamingRegion:
example: 'en' type: string
movieQuotaLimit:
type: number
nullable: true
description: 'Maximum number of movie requests allowed'
example: 10
movieQuotaDays:
type: number
nullable: true
description: 'Time period in days for movie quota'
example: 30
tvQuotaLimit:
type: number
nullable: true
description: 'Maximum number of TV requests allowed'
example: 5
tvQuotaDays:
type: number
nullable: true
description: 'Time period in days for TV quota'
example: 14
globalMovieQuotaDays:
type: number
nullable: true
description: 'Global movie quota days setting'
example: 30
globalMovieQuotaLimit:
type: number
nullable: true
description: 'Global movie quota limit setting'
example: 10
globalTvQuotaLimit:
type: number
nullable: true
description: 'Global TV quota limit setting'
example: 5
globalTvQuotaDays:
type: number
nullable: true
description: 'Global TV quota days setting'
example: 14
watchlistSyncMovies:
type: boolean
nullable: true
description: 'Enable watchlist sync for movies'
example: true
watchlistSyncTv:
type: boolean
nullable: true
description: 'Enable watchlist sync for TV'
example: false
MainSettings: MainSettings:
type: object type: object
properties: properties:
@@ -260,51 +191,9 @@ components:
csrfProtection: csrfProtection:
type: boolean type: boolean
example: false example: false
forceIpv4First:
type: boolean
example: false
trustProxy: trustProxy:
type: boolean type: boolean
example: false example: true
proxy:
type: object
properties:
enabled:
type: boolean
example: false
hostname:
type: string
example: ''
port:
type: number
example: 8080
useSsl:
type: boolean
example: false
user:
type: string
example: ''
password:
type: string
example: ''
bypassFilter:
type: string
example: ''
bypassLocalAddresses:
type: boolean
example: true
dnsCache:
type: object
properties:
enabled:
type: boolean
example: false
forceMinTtl:
type: number
example: 0
forceMaxTtl:
type: number
example: -1
PlexLibrary: PlexLibrary:
type: object type: object
properties: properties:
@@ -519,20 +408,6 @@ components:
serverID: serverID:
type: string type: string
readOnly: true readOnly: true
MetadataSettings:
type: object
properties:
settings:
type: object
properties:
tv:
type: string
enum: [tvdb, tmdb]
example: 'tvdb'
anime:
type: string
enum: [tvdb, tmdb]
example: 'tvdb'
TautulliSettings: TautulliSettings:
type: object type: object
properties: properties:
@@ -1451,9 +1326,6 @@ components:
type: string type: string
jsonPayload: jsonPayload:
type: string type: string
supportVariables:
type: boolean
example: false
TelegramSettings: TelegramSettings:
type: object type: object
properties: properties:
@@ -1553,6 +1425,22 @@ components:
type: boolean type: boolean
token: token:
type: string type: string
LunaSeaSettings:
type: object
properties:
enabled:
type: boolean
example: false
types:
type: number
example: 2
options:
type: object
properties:
webhookUrl:
type: string
profileName:
type: string
NotificationEmailSettings: NotificationEmailSettings:
type: object type: object
properties: properties:
@@ -2585,67 +2473,6 @@ paths:
type: string type: string
thumb: thumb:
type: string type: string
/settings/metadatas:
get:
summary: Get Metadata settings
description: Retrieves current Metadata settings.
tags:
- settings
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/MetadataSettings'
put:
summary: Update Metadata settings
description: Updates Metadata settings with the provided values.
tags:
- settings
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/MetadataSettings'
responses:
'200':
description: 'Values were successfully updated'
content:
application/json:
schema:
$ref: '#/components/schemas/MetadataSettings'
/settings/metadatas/test:
post:
summary: Test Provider configuration
description: Tests if the TVDB configuration is valid. Returns a list of available languages on success.
tags:
- settings
requestBody:
required: true
content:
application/json:
schema:
type: object
properties:
tmdb:
type: boolean
example: true
tvdb:
type: boolean
example: true
responses:
'200':
description: Successfully connected to TVDB
content:
application/json:
schema:
type: object
properties:
message:
type: string
example: 'Successfully connected to TVDB'
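Outside the UI, the test endpoint can be exercised directly; a sketch, assuming the default port and API-key authentication via the `X-Api-Key` header:
```bash
curl -X POST "http://localhost:5055/api/v1/settings/metadatas/test" \
  -H "X-Api-Key: <your-api-key>" \
  -H "Content-Type: application/json" \
  -d '{"tmdb": true, "tvdb": true}'
```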
/settings/tautulli: /settings/tautulli:
get: get:
summary: Get Tautulli settings summary: Get Tautulli settings
@@ -3087,68 +2914,6 @@ paths:
imageCount: imageCount:
type: number type: number
example: 123 example: 123
dnsCache:
type: object
properties:
stats:
type: object
properties:
size:
type: number
example: 1
maxSize:
type: number
example: 500
hits:
type: number
example: 19
misses:
type: number
example: 1
failures:
type: number
example: 0
ipv4Fallbacks:
type: number
example: 0
hitRate:
type: number
example: 0.95
entries:
type: array
additionalProperties:
type: object
properties:
addresses:
type: object
properties:
ipv4:
type: number
example: 1
ipv6:
type: number
example: 1
activeAddress:
type: string
example: 127.0.0.1
family:
type: number
example: 4
age:
type: number
example: 10
ttl:
type: number
example: 10
networkErrors:
type: number
example: 0
hits:
type: number
example: 1
misses:
type: number
example: 1
apiCaches: apiCaches:
type: array type: array
items: items:
@@ -3188,21 +2953,6 @@ paths:
responses: responses:
'204': '204':
description: 'Flushed cache' description: 'Flushed cache'
/settings/cache/dns/{dnsEntry}/flush:
post:
summary: Flush a specific DNS cache entry
description: Flushes a specific DNS cache entry
tags:
- settings
parameters:
- in: path
name: dnsEntry
required: true
schema:
type: string
responses:
'204':
description: 'Flushed dns cache'
/settings/logs: /settings/logs:
get: get:
summary: Returns logs summary: Returns logs
@@ -3349,6 +3099,52 @@ paths:
responses: responses:
'204': '204':
description: Test notification attempted description: Test notification attempted
/settings/notifications/lunasea:
get:
summary: Get LunaSea notification settings
description: Returns current LunaSea notification settings in a JSON object.
tags:
- settings
responses:
'200':
description: Returned LunaSea settings
content:
application/json:
schema:
$ref: '#/components/schemas/LunaSeaSettings'
post:
summary: Update LunaSea notification settings
description: Updates LunaSea notification settings with the provided values.
tags:
- settings
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/LunaSeaSettings'
responses:
'200':
description: 'Values were successfully updated'
content:
application/json:
schema:
$ref: '#/components/schemas/LunaSeaSettings'
/settings/notifications/lunasea/test:
post:
summary: Test LunaSea settings
description: Sends a test notification to the LunaSea agent.
tags:
- settings
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/LunaSeaSettings'
responses:
'204':
description: Test notification attempted
/settings/notifications/pushbullet: /settings/notifications/pushbullet:
get: get:
summary: Get Pushbullet notification settings summary: Get Pushbullet notification settings
@@ -4113,8 +3909,14 @@ paths:
name: sort name: sort
schema: schema:
type: string type: string
enum: [created, updated, requests, displayname] enum: [created, updated, requests, displayname, usertype, role]
default: created default: created
- in: query
name: sortDirection
schema:
type: string
enum: [asc, desc]
default: desc
- in: query - in: query
name: q name: q
required: false required: false
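Combined with the existing `take`/`skip` paging parameters (as seen in the Cypress intercepts earlier in this comparison), a user list request using the new sorting options might look like the following sketch, again assuming the default port and `X-Api-Key` authentication:
```bash
curl "http://localhost:5055/api/v1/user?take=10&skip=0&sort=displayname&sortDirection=asc" \
  -H "X-Api-Key: <your-api-key>"
```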
@@ -4735,7 +4537,11 @@ paths:
content: content:
application/json: application/json:
schema: schema:
$ref: '#/components/schemas/UserSettings' type: object
properties:
username:
type: string
example: 'Mr User'
post: post:
summary: Update general settings for a user summary: Update general settings for a user
description: Updates and returns general settings for a specific user. Requires `MANAGE_USERS` permission if editing other users. description: Updates and returns general settings for a specific user. Requires `MANAGE_USERS` permission if editing other users.
@@ -4752,14 +4558,22 @@ paths:
content: content:
application/json: application/json:
schema: schema:
$ref: '#/components/schemas/UserSettings' type: object
properties:
username:
type: string
nullable: true
responses: responses:
'200': '200':
description: Updated user general settings returned description: Updated user general settings returned
content: content:
application/json: application/json:
schema: schema:
$ref: '#/components/schemas/UserSettings' type: object
properties:
username:
type: string
example: 'Mr User'
/user/{userId}/settings/password: /user/{userId}/settings/password:
get: get:
summary: Get password page information summary: Get password page information
@@ -5198,12 +5012,6 @@ paths:
schema: schema:
type: string type: string
example: 1,2 example: 1,2
- in: query
name: excludeKeywords
schema:
type: string
example: 3,4
description: Comma-separated list of keyword IDs to exclude from results
- in: query - in: query
name: sortBy name: sortBy
schema: schema:
@@ -5524,12 +5332,6 @@ paths:
schema: schema:
type: string type: string
example: 1,2 example: 1,2
- in: query
name: excludeKeywords
schema:
type: string
example: 3,4
description: Comma-separated list of keyword IDs to exclude from results
- in: query - in: query
name: sortBy name: sortBy
schema: schema:
@@ -6153,7 +5955,7 @@ paths:
get: get:
summary: Gets request counts summary: Gets request counts
description: | description: |
Returns the number of requests by status including pending, approved, available, and completed requests. Returns the number of pending and approved requests.
tags: tags:
- request - request
responses: responses:
@@ -6180,8 +5982,6 @@ paths:
type: number type: number
available: available:
type: number type: number
completed:
type: number
/request/{requestId}: /request/{requestId}:
get: get:
summary: Get MediaRequest summary: Get MediaRequest
@@ -6564,7 +6364,7 @@ paths:
application/json: application/json:
schema: schema:
$ref: '#/components/schemas/TvDetails' $ref: '#/components/schemas/TvDetails'
/tv/{tvId}/season/{seasonNumber}: /tv/{tvId}/season/{seasonId}:
get: get:
summary: Get season details and episode list summary: Get season details and episode list
description: Returns season details with a list of episodes in a JSON object. description: Returns season details with a list of episodes in a JSON object.
@@ -6578,11 +6378,11 @@ paths:
type: number type: number
example: 76479 example: 76479
- in: path - in: path
name: seasonNumber name: seasonId
required: true required: true
schema: schema:
type: number type: number
example: 123456 example: 1
- in: query - in: query
name: language name: language
schema: schema:
@@ -6867,16 +6667,9 @@ paths:
example: '1' example: '1'
schema: schema:
type: string type: string
- in: query
name: is4k
description: Whether to remove from 4K service instance (true) or regular service instance (false)
required: false
example: false
schema:
type: boolean
responses: responses:
'204': '204':
description: Successfully removed media item description: Succesfully removed media item
/media/{mediaId}/{status}: /media/{mediaId}/{status}:
post: post:
summary: Update media status summary: Update media status
@@ -7543,22 +7336,11 @@ paths:
example: 1 example: 1
responses: responses:
'200': '200':
description: Keyword returned (null if not found) description: Keyword returned
content: content:
application/json: application/json:
schema: schema:
nullable: true
$ref: '#/components/schemas/Keyword' $ref: '#/components/schemas/Keyword'
'500':
description: Internal server error
content:
application/json:
schema:
type: object
properties:
message:
type: string
example: 'Unable to retrieve keyword data.'
/watchproviders/regions: /watchproviders/regions:
get: get:
summary: Get watch provider regions summary: Get watch provider regions


@@ -2,7 +2,6 @@
"name": "jellyseerr", "name": "jellyseerr",
"version": "0.1.0", "version": "0.1.0",
"private": true, "private": true,
"packageManager": "pnpm@10.17.1",
"scripts": { "scripts": {
"preinstall": "npx only-allow pnpm", "preinstall": "npx only-allow pnpm",
"postinstall": "node postinstall-win.js", "postinstall": "node postinstall-win.js",
@@ -47,7 +46,7 @@
"@types/ua-parser-js": "^0.7.36", "@types/ua-parser-js": "^0.7.36",
"@types/wink-jaro-distance": "^2.0.2", "@types/wink-jaro-distance": "^2.0.2",
"ace-builds": "1.15.2", "ace-builds": "1.15.2",
"axios": "1.10.0", "axios": "1.3.4",
"axios-rate-limit": "1.3.0", "axios-rate-limit": "1.3.0",
"bcrypt": "5.1.0", "bcrypt": "5.1.0",
"bowser": "2.11.0", "bowser": "2.11.0",
@@ -58,7 +57,6 @@
"cronstrue": "2.23.0", "cronstrue": "2.23.0",
"date-fns": "2.29.3", "date-fns": "2.29.3",
"dayjs": "1.11.7", "dayjs": "1.11.7",
"dns-caching": "^0.2.7",
"email-templates": "12.0.1", "email-templates": "12.0.1",
"email-validator": "2.0.4", "email-validator": "2.0.4",
"express": "4.21.2", "express": "4.21.2",
@@ -119,6 +117,10 @@
"devDependencies": { "devDependencies": {
"@commitlint/cli": "17.4.4", "@commitlint/cli": "17.4.4",
"@commitlint/config-conventional": "17.4.4", "@commitlint/config-conventional": "17.4.4",
"@semantic-release/changelog": "6.0.2",
"@semantic-release/commit-analyzer": "9.0.2",
"@semantic-release/exec": "6.0.3",
"@semantic-release/git": "10.0.1",
"@tailwindcss/aspect-ratio": "0.4.2", "@tailwindcss/aspect-ratio": "0.4.2",
"@tailwindcss/forms": "0.5.10", "@tailwindcss/forms": "0.5.10",
"@tailwindcss/typography": "0.5.16", "@tailwindcss/typography": "0.5.16",
@@ -168,6 +170,8 @@
"prettier": "2.8.4", "prettier": "2.8.4",
"prettier-plugin-organize-imports": "3.2.2", "prettier-plugin-organize-imports": "3.2.2",
"prettier-plugin-tailwindcss": "0.2.3", "prettier-plugin-tailwindcss": "0.2.3",
"semantic-release": "19.0.5",
"semantic-release-docker-buildx": "1.0.1",
"tailwindcss": "3.2.7", "tailwindcss": "3.2.7",
"ts-node": "10.9.1", "ts-node": "10.9.1",
"tsc-alias": "1.8.2", "tsc-alias": "1.8.2",
@@ -176,7 +180,7 @@
}, },
"engines": { "engines": {
"node": "^22.0.0", "node": "^22.0.0",
"pnpm": "^10.0.0" "pnpm": "^9.0.0"
}, },
"overrides": { "overrides": {
"sqlite3/node-gyp": "8.4.1", "sqlite3/node-gyp": "8.4.1",
@@ -205,50 +209,24 @@
"plugins": [ "plugins": [
"@semantic-release/commit-analyzer", "@semantic-release/commit-analyzer",
"@semantic-release/release-notes-generator", "@semantic-release/release-notes-generator",
[
"@semantic-release/changelog",
{
"changelogFile": "CHANGELOG.md"
}
],
"@semantic-release/npm", "@semantic-release/npm",
[ [
"@codedependant/semantic-release-docker", "@semantic-release/git",
{ {
"dockerArgs": { "assets": [
"COMMIT_TAG": "${GITHUB_SHA}" "package.json",
}, "CHANGELOG.md"
"dockerLogin": false,
"dockerProject": "fallenbagel",
"dockerImage": "jellyseerr",
"dockerTags": [
"latest",
"{{major}}",
"{{major}}.{{minor}}",
"{{major}}.{{minor}}.{{patch}}"
], ],
"dockerPlatform": [ "message": "chore(release): ${nextRelease.version}"
"linux/amd64",
"linux/arm64"
]
}
],
[
"@codedependant/semantic-release-docker",
{
"dockerArgs": {
"COMMIT_TAG": "${GITHUB_SHA}"
},
"dockerLogin": false,
"dockerRegistry": "ghcr.io",
"dockerProject": "fallenbagel",
"dockerImage": "jellyseerr",
"dockerTags": [
"latest",
"{{major}}",
"{{major}}.{{minor}}",
"{{major}}.{{minor}}.{{patch}}"
],
"dockerPlatform": [
"linux/amd64",
"linux/arm64"
]
} }
], ],
"semantic-release-docker-buildx",
[ [
"@semantic-release/github", "@semantic-release/github",
{ {
@@ -261,14 +239,21 @@
], ],
"npmPublish": false, "npmPublish": false,
"publish": [ "publish": [
"@codedependant/semantic-release-docker", {
"path": "semantic-release-docker-buildx",
"buildArgs": {
"COMMIT_TAG": "$GIT_SHA"
},
"imageNames": [
"fallenbagel/jellyseerr",
"ghcr.io/fallenbagel/jellyseerr"
],
"platforms": [
"linux/amd64",
"linux/arm64"
]
},
"@semantic-release/github" "@semantic-release/github"
] ]
},
"pnpm": {
"onlyBuiltDependencies": [
"sqlite3",
"bcrypt"
]
} }
} }

pnpm-lock.yaml (generated, 2300 changed lines): diff suppressed because it is too large.

View File

@@ -48,7 +48,6 @@ export interface AnidbItem {
tvdbId?: number; tvdbId?: number;
tmdbId?: number; tmdbId?: number;
imdbId?: string; imdbId?: string;
tvdbSeason?: number;
} }
class AnimeListMapping { class AnimeListMapping {
@@ -98,7 +97,6 @@ class AnimeListMapping {
tvdbId: anime.$.defaulttvdbseason === '0' ? undefined : tvdbId, tvdbId: anime.$.defaulttvdbseason === '0' ? undefined : tvdbId,
tmdbId: tmdbId, tmdbId: tmdbId,
imdbId: imdbIds[0], // this is used for one AniDB -> one imdb movie mapping imdbId: imdbIds[0], // this is used for one AniDB -> one imdb movie mapping
tvdbSeason: Number(anime.$.defaulttvdbseason),
}; };
if (tvdbId) { if (tvdbId) {

View File

@@ -1,4 +1,3 @@
import { requestInterceptorFunction } from '@server/utils/customProxyAgent';
import type { AxiosInstance, AxiosRequestConfig } from 'axios'; import type { AxiosInstance, AxiosRequestConfig } from 'axios';
import axios from 'axios'; import axios from 'axios';
import rateLimit from 'axios-rate-limit'; import rateLimit from 'axios-rate-limit';
@@ -10,7 +9,7 @@ const DEFAULT_TTL = 300;
// 10 seconds default rolling buffer (in ms) // 10 seconds default rolling buffer (in ms)
const DEFAULT_ROLLING_BUFFER = 10000; const DEFAULT_ROLLING_BUFFER = 10000;
export interface ExternalAPIOptions { interface ExternalAPIOptions {
nodeCache?: NodeCache; nodeCache?: NodeCache;
headers?: Record<string, unknown>; headers?: Record<string, unknown>;
rateLimit?: { rateLimit?: {
@@ -38,7 +37,6 @@ class ExternalAPI {
...options.headers, ...options.headers,
}, },
}); });
this.axios.interceptors.request.use(requestInterceptorFunction);
if (options.rateLimit) { if (options.rateLimit) {
this.axios = rateLimit(this.axios, { this.axios = rateLimit(this.axios, {

View File

@@ -103,7 +103,6 @@ export interface JellyfinLibraryItemExtended extends JellyfinLibraryItem {
Tmdb?: string; Tmdb?: string;
Imdb?: string; Imdb?: string;
Tvdb?: string; Tvdb?: string;
AniDB?: string;
}; };
MediaSources?: JellyfinMediaSource[]; MediaSources?: JellyfinMediaSource[];
Width?: number; Width?: number;

View File

@@ -1,39 +0,0 @@
import type { TvShowProvider } from '@server/api/provider';
import TheMovieDb from '@server/api/themoviedb';
import Tvdb from '@server/api/tvdb';
import { getSettings, MetadataProviderType } from '@server/lib/settings';
import logger from '@server/logger';
export const getMetadataProvider = async (
mediaType: 'movie' | 'tv' | 'anime'
): Promise<TvShowProvider> => {
try {
const settings = await getSettings();
if (mediaType == 'movie') {
return new TheMovieDb();
}
if (
mediaType == 'tv' &&
settings.metadataSettings.tv == MetadataProviderType.TVDB
) {
return await Tvdb.getInstance();
}
if (
mediaType == 'anime' &&
settings.metadataSettings.anime == MetadataProviderType.TVDB
) {
return await Tvdb.getInstance();
}
return new TheMovieDb();
} catch (e) {
logger.error('Failed to get metadata provider', {
label: 'Metadata',
message: e.message,
});
return new TheMovieDb();
}
};
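For context, a minimal call-site sketch for the provider factory above; it resolves to Tvdb for tv/anime when TVDB is the configured metadata provider and to TheMovieDb otherwise. Values are hypothetical and the snippet is illustrative, not part of the change.

// Illustrative only: resolve the configured provider and fetch a season through the shared interface.
import { getMetadataProvider } from '@server/api/metadata';

const provider = await getMetadataProvider('anime');
const season = await provider.getTvSeason({
  tvId: 37854, // hypothetical TMDB id
  seasonNumber: 1,
  language: 'en',
});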

View File

@@ -113,7 +113,7 @@ interface MetadataResponse {
ratingKey: string; ratingKey: string;
type: 'movie' | 'show'; type: 'movie' | 'show';
title: string; title: string;
Guid?: { Guid: {
id: `imdb://tt${number}` | `tmdb://${number}` | `tvdb://${number}`; id: `imdb://tt${number}` | `tmdb://${number}` | `tvdb://${number}`;
}[]; }[];
}[]; }[];
@@ -277,18 +277,9 @@ class PlexTvAPI extends ExternalAPI {
}> { }> {
try { try {
const watchlistCache = cacheManager.getCache('plexwatchlist'); const watchlistCache = cacheManager.getCache('plexwatchlist');
logger.debug('Fetching watchlist from Plex.TV', {
offset,
size,
label: 'Plex.TV Metadata API',
});
let cachedWatchlist = watchlistCache.data.get<PlexWatchlistCache>( let cachedWatchlist = watchlistCache.data.get<PlexWatchlistCache>(
this.authToken this.authToken
); );
logger.debug(`Found cached watchlist: ${!!cachedWatchlist}`, {
cachedWatchlist,
label: 'Plex.TV Metadata API',
});
const response = await this.axios.get<WatchlistResponse>( const response = await this.axios.get<WatchlistResponse>(
'/library/sections/watchlist/all', '/library/sections/watchlist/all',
@@ -300,15 +291,11 @@ class PlexTvAPI extends ExternalAPI {
headers: { headers: {
'If-None-Match': cachedWatchlist?.etag, 'If-None-Match': cachedWatchlist?.etag,
}, },
baseURL: 'https://discover.provider.plex.tv', baseURL: 'https://metadata.provider.plex.tv',
validateStatus: (status) => status < 400, // Allow HTTP 304 to return without error validateStatus: (status) => status < 400, // Allow HTTP 304 to return without error
} }
); );
logger.debug(`Watchlist fetch returned status ${response.status}`, {
label: 'Plex.TV Metadata API',
});
// If we don't receive HTTP 304, the watchlist has been updated and we need to update the cache. // If we don't receive HTTP 304, the watchlist has been updated and we need to update the cache.
if (response.status >= 200 && response.status <= 299) { if (response.status >= 200 && response.status <= 299) {
cachedWatchlist = { cachedWatchlist = {
@@ -325,32 +312,19 @@ class PlexTvAPI extends ExternalAPI {
const watchlistDetails = await Promise.all( const watchlistDetails = await Promise.all(
(cachedWatchlist?.response.MediaContainer.Metadata ?? []).map( (cachedWatchlist?.response.MediaContainer.Metadata ?? []).map(
async (watchlistItem) => { async (watchlistItem) => {
let detailedResponse: MetadataResponse; const detailedResponse = await this.getRolling<MetadataResponse>(
try { `/library/metadata/${watchlistItem.ratingKey}`,
detailedResponse = await this.getRolling<MetadataResponse>( {
`/library/metadata/${watchlistItem.ratingKey}`, baseURL: 'https://metadata.provider.plex.tv',
{
baseURL: 'https://discover.provider.plex.tv',
}
);
} catch (e) {
if (e.response?.status === 404) {
logger.warn(
`Item with ratingKey ${watchlistItem.ratingKey} not found, it may have been removed from the server.`,
{ label: 'Plex.TV Metadata API' }
);
return null;
} else {
throw e;
} }
} );
const metadata = detailedResponse.MediaContainer.Metadata[0]; const metadata = detailedResponse.MediaContainer.Metadata[0];
const tmdbString = metadata.Guid?.find((guid) => const tmdbString = metadata.Guid.find((guid) =>
guid.id.startsWith('tmdb') guid.id.startsWith('tmdb')
); );
const tvdbString = metadata.Guid?.find((guid) => const tvdbString = metadata.Guid.find((guid) =>
guid.id.startsWith('tvdb') guid.id.startsWith('tvdb')
); );
@@ -369,9 +343,7 @@ class PlexTvAPI extends ExternalAPI {
) )
); );
const filteredList = watchlistDetails.filter( const filteredList = watchlistDetails.filter((detail) => detail.tmdbId);
(detail) => detail?.tmdbId
) as PlexWatchlistItem[];
return { return {
offset, offset,

View File

@@ -1,30 +0,0 @@
import type {
TmdbSeasonWithEpisodes,
TmdbTvDetails,
} from '@server/api/themoviedb/interfaces';
export interface TvShowProvider {
getTvShow({
tvId,
language,
}: {
tvId: number;
language?: string;
}): Promise<TmdbTvDetails>;
getTvSeason({
tvId,
seasonNumber,
language,
}: {
tvId: number;
seasonNumber: number;
language?: string;
}): Promise<TmdbSeasonWithEpisodes>;
getShowByTvdbId({
tvdbId,
language,
}: {
tvdbId: number;
language?: string;
}): Promise<TmdbTvDetails>;
}

View File

@@ -145,7 +145,6 @@ export interface IMDBRating {
title: string; title: string;
url: string; url: string;
criticsScore: number; criticsScore: number;
criticsScoreCount: number;
} }
/** /**
@@ -188,7 +187,6 @@ class IMDBRadarrProxy extends ExternalAPI {
title: data[0].Title, title: data[0].Title,
url: `https://www.imdb.com/title/${data[0].ImdbId}`, url: `https://www.imdb.com/title/${data[0].ImdbId}`,
criticsScore: data[0].MovieRatings.Imdb.Value, criticsScore: data[0].MovieRatings.Imdb.Value,
criticsScoreCount: data[0].MovieRatings.Imdb.Count,
}; };
} catch (e) { } catch (e) {
throw new Error( throw new Error(

View File

@@ -198,25 +198,6 @@ class ServarrBase<QueueItemAppendT> extends ExternalAPI {
} }
}; };
public renameTag = async ({
id,
label,
}: {
id: number;
label: string;
}): Promise<Tag> => {
try {
const response = await this.axios.put<Tag>(`/tag/${id}`, {
id,
label,
});
return response.data;
} catch (e) {
throw new Error(`[${this.apiName}] Failed to rename tag: ${e.message}`);
}
};
async refreshMonitoredDownloads(): Promise<void> { async refreshMonitoredDownloads(): Promise<void> {
await this.runCommand('RefreshMonitoredDownloads', {}); await this.runCommand('RefreshMonitoredDownloads', {});
} }

View File

@@ -1,7 +1,6 @@
import type { User } from '@server/entity/User'; import type { User } from '@server/entity/User';
import type { TautulliSettings } from '@server/lib/settings'; import type { TautulliSettings } from '@server/lib/settings';
import logger from '@server/logger'; import logger from '@server/logger';
import { requestInterceptorFunction } from '@server/utils/customProxyAgent';
import type { AxiosInstance } from 'axios'; import type { AxiosInstance } from 'axios';
import axios from 'axios'; import axios from 'axios';
import { uniqWith } from 'lodash'; import { uniqWith } from 'lodash';
@@ -124,7 +123,6 @@ class TautulliAPI {
}${settings.urlBase ?? ''}`, }${settings.urlBase ?? ''}`,
params: { apikey: settings.apiKey }, params: { apikey: settings.apiKey },
}); });
this.axios.interceptors.request.use(requestInterceptorFunction);
} }
public async getInfo(): Promise<TautulliInfo> { public async getInfo(): Promise<TautulliInfo> {

View File

@@ -1,5 +1,4 @@
import ExternalAPI from '@server/api/externalapi'; import ExternalAPI from '@server/api/externalapi';
import type { TvShowProvider } from '@server/api/provider';
import cacheManager from '@server/lib/cache'; import cacheManager from '@server/lib/cache';
import { getSettings } from '@server/lib/settings'; import { getSettings } from '@server/lib/settings';
import { sortBy } from 'lodash'; import { sortBy } from 'lodash';
@@ -86,7 +85,6 @@ interface DiscoverMovieOptions {
genre?: string; genre?: string;
studio?: string; studio?: string;
keywords?: string; keywords?: string;
excludeKeywords?: string;
sortBy?: SortOptions; sortBy?: SortOptions;
watchRegion?: string; watchRegion?: string;
watchProviders?: string; watchProviders?: string;
@@ -112,7 +110,6 @@ interface DiscoverTvOptions {
genre?: string; genre?: string;
network?: number; network?: number;
keywords?: string; keywords?: string;
excludeKeywords?: string;
sortBy?: SortOptions; sortBy?: SortOptions;
watchRegion?: string; watchRegion?: string;
watchProviders?: string; watchProviders?: string;
@@ -123,7 +120,7 @@ interface DiscoverTvOptions {
certificationCountry?: string; certificationCountry?: string;
} }
class TheMovieDb extends ExternalAPI implements TvShowProvider { class TheMovieDb extends ExternalAPI {
private locale: string; private locale: string;
private discoverRegion?: string; private discoverRegion?: string;
private originalLanguage?: string; private originalLanguage?: string;
@@ -344,13 +341,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
} }
); );
data.episodes = data.episodes.map((episode) => {
if (episode.still_path) {
episode.still_path = `https://image.tmdb.org/t/p/original/${episode.still_path}`;
}
return episode;
});
return data; return data;
} catch (e) { } catch (e) {
throw new Error(`[TMDB] Failed to fetch TV show details: ${e.message}`); throw new Error(`[TMDB] Failed to fetch TV show details: ${e.message}`);
@@ -497,7 +487,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
genre, genre,
studio, studio,
keywords, keywords,
excludeKeywords,
withRuntimeGte, withRuntimeGte,
withRuntimeLte, withRuntimeLte,
voteAverageGte, voteAverageGte,
@@ -548,7 +537,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
with_genres: genre, with_genres: genre,
with_companies: studio, with_companies: studio,
with_keywords: keywords, with_keywords: keywords,
without_keywords: excludeKeywords,
'with_runtime.gte': withRuntimeGte, 'with_runtime.gte': withRuntimeGte,
'with_runtime.lte': withRuntimeLte, 'with_runtime.lte': withRuntimeLte,
'vote_average.gte': voteAverageGte, 'vote_average.gte': voteAverageGte,
@@ -581,7 +569,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
genre, genre,
network, network,
keywords, keywords,
excludeKeywords,
withRuntimeGte, withRuntimeGte,
withRuntimeLte, withRuntimeLte,
voteAverageGte, voteAverageGte,
@@ -633,7 +620,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
with_genres: genre, with_genres: genre,
with_networks: network, with_networks: network,
with_keywords: keywords, with_keywords: keywords,
without_keywords: excludeKeywords,
'with_runtime.gte': withRuntimeGte, 'with_runtime.gte': withRuntimeGte,
'with_runtime.lte': withRuntimeLte, 'with_runtime.lte': withRuntimeLte,
'vote_average.gte': voteAverageGte, 'vote_average.gte': voteAverageGte,
@@ -1068,7 +1054,7 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
keywordId, keywordId,
}: { }: {
keywordId: number; keywordId: number;
}): Promise<TmdbKeyword | null> { }): Promise<TmdbKeyword> {
try { try {
const data = await this.get<TmdbKeyword>( const data = await this.get<TmdbKeyword>(
`/keyword/${keywordId}`, `/keyword/${keywordId}`,
@@ -1078,9 +1064,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
return data; return data;
} catch (e) { } catch (e) {
if (e.response?.status === 404) {
return null;
}
throw new Error(`[TMDB] Failed to fetch keyword: ${e.message}`); throw new Error(`[TMDB] Failed to fetch keyword: ${e.message}`);
} }
} }

View File

@@ -220,7 +220,7 @@ export interface TmdbTvEpisodeResult {
show_id: number; show_id: number;
still_path: string; still_path: string;
vote_average: number; vote_average: number;
vote_count: number; vote_cuont: number;
} }
export interface TmdbTvSeasonResult { export interface TmdbTvSeasonResult {

View File

@@ -1,563 +0,0 @@
import ExternalAPI from '@server/api/externalapi';
import type { TvShowProvider } from '@server/api/provider';
import TheMovieDb from '@server/api/themoviedb';
import type {
TmdbSeasonWithEpisodes,
TmdbTvDetails,
TmdbTvEpisodeResult,
TmdbTvSeasonResult,
} from '@server/api/themoviedb/interfaces';
import {
convertTmdbLanguageToTvdbWithFallback,
type TvdbBaseResponse,
type TvdbEpisode,
type TvdbLoginResponse,
type TvdbSeasonDetails,
type TvdbTvDetails,
} from '@server/api/tvdb/interfaces';
import cacheManager, { type AvailableCacheIds } from '@server/lib/cache';
import logger from '@server/logger';
interface TvdbConfig {
baseUrl: string;
maxRequestsPerSecond: number;
maxRequests: number;
cachePrefix: AvailableCacheIds;
}
const DEFAULT_CONFIG: TvdbConfig = {
baseUrl: 'https://api4.thetvdb.com/v4',
maxRequestsPerSecond: 50,
maxRequests: 20,
cachePrefix: 'tvdb' as const,
};
const enum TvdbIdStatus {
INVALID = -1,
}
type TvdbId = number;
type ValidTvdbId = Exclude<TvdbId, TvdbIdStatus.INVALID>;
class Tvdb extends ExternalAPI implements TvShowProvider {
static instance: Tvdb;
private readonly tmdb: TheMovieDb;
private static readonly DEFAULT_CACHE_TTL = 43200;
private static readonly DEFAULT_LANGUAGE = 'eng';
private token: string;
private pin?: string;
constructor(pin?: string) {
const finalConfig = { ...DEFAULT_CONFIG };
super(
finalConfig.baseUrl,
{},
{
nodeCache: cacheManager.getCache(finalConfig.cachePrefix).data,
rateLimit: {
maxRequests: finalConfig.maxRequests,
maxRPS: finalConfig.maxRequestsPerSecond,
},
}
);
this.pin = pin;
this.tmdb = new TheMovieDb();
}
public static async getInstance(): Promise<Tvdb> {
if (!this.instance) {
this.instance = new Tvdb();
await this.instance.login();
}
return this.instance;
}
private async refreshToken(): Promise<void> {
try {
if (!this.token) {
await this.login();
return;
}
const base64Url = this.token.split('.')[1];
const base64 = base64Url.replace(/-/g, '+').replace(/_/g, '/');
const payload = JSON.parse(Buffer.from(base64, 'base64').toString());
if (!payload.exp) {
await this.login();
}
const now = Math.floor(Date.now() / 1000);
const diff = payload.exp - now;
// refresh token 1 week before expiration
if (diff < 604800) {
await this.login();
}
} catch (error) {
this.handleError('Failed to refresh token', error);
}
}
public async test(): Promise<void> {
try {
await this.login();
} catch (error) {
this.handleError('Login failed', error);
throw error;
}
}
async login(): Promise<TvdbLoginResponse> {
let body: { apiKey: string; pin?: string } = {
apiKey: 'd00d9ecb-a9d0-4860-958a-74b14a041405',
};
if (this.pin) {
body = {
...body,
pin: this.pin,
};
}
const response = await this.post<TvdbBaseResponse<TvdbLoginResponse>>(
'/login',
{
...body,
}
);
this.token = response.data.token;
return response.data;
}
public async getShowByTvdbId({
tvdbId,
language,
}: {
tvdbId: number;
language?: string;
}): Promise<TmdbTvDetails> {
try {
const tmdbTvShow = await this.tmdb.getShowByTvdbId({
tvdbId: tvdbId,
language,
});
try {
await this.refreshToken();
const validTvdbId = this.getTvdbIdFromTmdb(tmdbTvShow);
if (this.isValidTvdbId(validTvdbId)) {
return this.enrichTmdbShowWithTvdbData(tmdbTvShow, validTvdbId);
}
return tmdbTvShow;
} catch (error) {
return tmdbTvShow;
}
} catch (error) {
this.handleError('Failed to fetch TV show details', error);
throw error;
}
}
public async getTvShow({
tvId,
language,
}: {
tvId: number;
language?: string;
}): Promise<TmdbTvDetails> {
try {
const tmdbTvShow = await this.tmdb.getTvShow({ tvId, language });
try {
await this.refreshToken();
const tvdbId = this.getTvdbIdFromTmdb(tmdbTvShow);
if (this.isValidTvdbId(tvdbId)) {
return await this.enrichTmdbShowWithTvdbData(tmdbTvShow, tvdbId);
}
return tmdbTvShow;
} catch (error) {
this.handleError('Failed to fetch TV show details', error);
return tmdbTvShow;
}
} catch (error) {
this.handleError('Failed to fetch TV show details', error);
return this.tmdb.getTvShow({ tvId, language });
}
}
public async getTvSeason({
tvId,
seasonNumber,
language = Tvdb.DEFAULT_LANGUAGE,
}: {
tvId: number;
seasonNumber: number;
language?: string;
}): Promise<TmdbSeasonWithEpisodes> {
try {
const tmdbTvShow = await this.tmdb.getTvShow({ tvId, language });
try {
await this.refreshToken();
const tvdbId = this.getTvdbIdFromTmdb(tmdbTvShow);
if (!this.isValidTvdbId(tvdbId)) {
return await this.tmdb.getTvSeason({ tvId, seasonNumber, language });
}
return await this.getTvdbSeasonData(
tvdbId,
seasonNumber,
tvId,
language
);
} catch (error) {
this.handleError('Failed to fetch TV season details', error);
return await this.tmdb.getTvSeason({ tvId, seasonNumber, language });
}
} catch (error) {
logger.error(
`[TVDB] Failed to fetch TV season details: ${error.message}`
);
throw error;
}
}
private async enrichTmdbShowWithTvdbData(
tmdbTvShow: TmdbTvDetails,
tvdbId: ValidTvdbId
): Promise<TmdbTvDetails> {
try {
await this.refreshToken();
const tvdbData = await this.fetchTvdbShowData(tvdbId);
const seasons = this.processSeasons(tvdbData);
if (!seasons.length) {
return tmdbTvShow;
}
return { ...tmdbTvShow, seasons };
} catch (error) {
logger.error(
`Failed to enrich TMDB show with TVDB data: ${error.message} token: ${this.token}`
);
return tmdbTvShow;
}
}
private async fetchTvdbShowData(tvdbId: number): Promise<TvdbTvDetails> {
const resp = await this.get<TvdbBaseResponse<TvdbTvDetails>>(
`/series/${tvdbId}/extended?meta=episodes&short=true`,
{
headers: {
Authorization: `Bearer ${this.token}`,
},
},
Tvdb.DEFAULT_CACHE_TTL
);
return resp.data;
}
private processSeasons(tvdbData: TvdbTvDetails): TmdbTvSeasonResult[] {
if (!tvdbData || !tvdbData.seasons || !tvdbData.episodes) {
return [];
}
const seasons = tvdbData.seasons
.filter((season) => season.type && season.type.type === 'official')
.sort((a, b) => a.number - b.number)
.map((season) => this.createSeasonData(season, tvdbData))
.filter(
(season) => season && season.season_number >= 0
) as TmdbTvSeasonResult[];
return seasons;
}
private createSeasonData(
season: TvdbSeasonDetails,
tvdbData: TvdbTvDetails
): TmdbTvSeasonResult {
const seasonNumber = season.number ?? -1;
if (seasonNumber < 0) {
return {
id: 0,
episode_count: 0,
name: '',
overview: '',
season_number: -1,
poster_path: '',
air_date: '',
};
}
const episodeCount = tvdbData.episodes.filter(
(episode) => episode.seasonNumber === season.number
).length;
return {
id: tvdbData.id,
episode_count: episodeCount,
name: `${season.number}`,
overview: '',
season_number: season.number,
poster_path: '',
air_date: '',
};
}
private async getTvdbSeasonData(
tvdbId: number,
seasonNumber: number,
tvId: number,
language: string = Tvdb.DEFAULT_LANGUAGE
): Promise<TmdbSeasonWithEpisodes> {
const tvdbData = await this.fetchTvdbShowData(tvdbId);
if (!tvdbData) {
logger.error(`Failed to fetch TVDB data for ID: ${tvdbId}`);
return this.createEmptySeasonResponse(tvId);
}
// get season id
const season = tvdbData.seasons.find(
(season) =>
season.number === seasonNumber &&
season.type.type &&
season.type.type === 'official'
);
if (!season) {
logger.error(
`Failed to find season ${seasonNumber} for TVDB ID: ${tvdbId}`
);
return this.createEmptySeasonResponse(tvId);
}
const wantedTranslation = convertTmdbLanguageToTvdbWithFallback(
language,
Tvdb.DEFAULT_LANGUAGE
);
// check if translation is available for the season
const availableTranslation = season.nameTranslations.filter(
(translation) =>
translation === wantedTranslation ||
translation === Tvdb.DEFAULT_LANGUAGE
);
if (!availableTranslation) {
return this.getSeasonWithOriginalLanguage(
tvdbId,
tvId,
seasonNumber,
season
);
}
return this.getSeasonWithTranslation(
tvdbId,
tvId,
seasonNumber,
season,
wantedTranslation
);
}
private async getSeasonWithTranslation(
tvdbId: number,
tvId: number,
seasonNumber: number,
season: TvdbSeasonDetails,
language: string
): Promise<TmdbSeasonWithEpisodes> {
if (!season) {
logger.error(
`Failed to find season ${seasonNumber} for TVDB ID: ${tvdbId}`
);
return this.createEmptySeasonResponse(tvId);
}
const allEpisodes = [] as TvdbEpisode[];
let page = 0;
// Limit to max 50 pages to avoid infinite loops.
// 50 pages with 500 items per page = 25_000 episodes in a series which should be more than enough
const maxPages = 50;
while (page < maxPages) {
const resp = await this.get<TvdbBaseResponse<TvdbSeasonDetails>>(
`/series/${tvdbId}/episodes/default/${language}`,
{
headers: {
Authorization: `Bearer ${this.token}`,
},
params: {
page: page,
},
}
);
if (!resp?.data?.episodes) {
logger.warn(
`No episodes found for TVDB ID: ${tvdbId} on page ${page} for season ${seasonNumber}`
);
break;
}
const { episodes } = resp.data;
if (!episodes) {
logger.debug(
`No more episodes found for TVDB ID: ${tvdbId} on page ${page} for season ${seasonNumber}`
);
break;
}
allEpisodes.push(...episodes);
const hasNextPage = resp.links?.next && episodes.length > 0;
if (!hasNextPage) {
break;
}
page++;
}
if (page >= maxPages) {
logger.warn(
`Reached max pages (${maxPages}) for TVDB ID: ${tvdbId} on season ${seasonNumber} with language ${language}. There might be more episodes available.`
);
}
const episodes = this.processEpisodes(
{ ...season, episodes: allEpisodes },
seasonNumber,
tvId
);
return {
episodes,
external_ids: { tvdb_id: tvdbId },
name: '',
overview: '',
id: season.id,
air_date: season.firstAired,
season_number: episodes.length,
};
}
private async getSeasonWithOriginalLanguage(
tvdbId: number,
tvId: number,
seasonNumber: number,
season: TvdbSeasonDetails
): Promise<TmdbSeasonWithEpisodes> {
if (!season) {
logger.error(
`Failed to find season ${seasonNumber} for TVDB ID: ${tvdbId}`
);
return this.createEmptySeasonResponse(tvId);
}
const resp = await this.get<TvdbBaseResponse<TvdbSeasonDetails>>(
`/seasons/${season.id}/extended`,
{
headers: {
Authorization: `Bearer ${this.token}`,
},
}
);
const seasons = resp.data;
const episodes = this.processEpisodes(seasons, seasonNumber, tvId);
return {
episodes,
external_ids: { tvdb_id: tvdbId },
name: '',
overview: '',
id: seasons.id,
air_date: seasons.firstAired,
season_number: episodes.length,
};
}
private processEpisodes(
tvdbSeason: TvdbSeasonDetails,
seasonNumber: number,
tvId: number
): TmdbTvEpisodeResult[] {
if (!tvdbSeason || !tvdbSeason.episodes) {
logger.error('No episodes found in TVDB season data');
return [];
}
return tvdbSeason.episodes
.filter((episode) => episode.seasonNumber === seasonNumber)
.map((episode, index) => this.createEpisodeData(episode, index, tvId));
}
private createEpisodeData(
episode: TvdbEpisode,
index: number,
tvId: number
): TmdbTvEpisodeResult {
return {
id: episode.id,
air_date: episode.aired,
episode_number: episode.number,
name: episode.name || `Episode ${index + 1}`,
overview: episode.overview || '',
season_number: episode.seasonNumber,
production_code: '',
show_id: tvId,
still_path:
episode.image && !episode.image.startsWith('https://')
? 'https://artworks.thetvdb.com' + episode.image
: '',
vote_average: 1,
vote_count: 1,
};
}
private createEmptySeasonResponse(tvId: number): TmdbSeasonWithEpisodes {
return {
episodes: [],
external_ids: { tvdb_id: tvId },
name: '',
overview: '',
id: 0,
air_date: '',
season_number: 0,
};
}
private getTvdbIdFromTmdb(tmdbTvShow: TmdbTvDetails): TvdbId {
return tmdbTvShow?.external_ids?.tvdb_id ?? TvdbIdStatus.INVALID;
}
private isValidTvdbId(tvdbId: TvdbId): tvdbId is ValidTvdbId {
return tvdbId !== TvdbIdStatus.INVALID;
}
private handleError(context: string, error: Error): void {
throw new Error(`[TVDB] ${context}: ${error.message}`);
}
}
export default Tvdb;
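The refreshToken logic above hinges on decoding the JWT payload locally and renewing the login one week before expiry. A minimal sketch of that expiry check, assuming a standard three-part token (illustrative, not part of the change):

// Returns true when the token is missing an exp claim or expires within one week (604800 s).
const expiresSoon = (token: string): boolean => {
  const base64 = token.split('.')[1].replace(/-/g, '+').replace(/_/g, '/');
  const payload = JSON.parse(Buffer.from(base64, 'base64').toString());
  if (!payload.exp) {
    return true;
  }
  return payload.exp - Math.floor(Date.now() / 1000) < 604800;
};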

View File

@@ -1,216 +0,0 @@
import { type AvailableLocale } from '@server/types/languages';
export interface TvdbBaseResponse<T> {
data: T;
errors: string;
links?: TvdbPagination;
}
export interface TvdbPagination {
prev?: string;
self: string;
next?: string;
totalItems: number;
pageSize: number;
}
export interface TvdbLoginResponse {
token: string;
}
interface TvDetailsAliases {
language: string;
name: string;
}
interface TvDetailsStatus {
id: number;
name: string;
recordType: string;
keepUpdated: boolean;
}
export interface TvdbTvDetails {
id: number;
name: string;
slug: string;
image: string;
nameTranslations: string[];
overwiewTranslations: string[];
aliases: TvDetailsAliases[];
firstAired: Date;
lastAired: Date;
nextAired: Date | string;
score: number;
status: TvDetailsStatus;
originalCountry: string;
originalLanguage: string;
defaultSeasonType: string;
isOrderRandomized: boolean;
lastUpdated: Date;
averageRuntime: number;
seasons: TvdbSeasonDetails[];
episodes: TvdbEpisode[];
}
interface TvdbCompanyType {
companyTypeId: number;
companyTypeName: string;
}
interface TvdbParentCompany {
id?: number;
name?: string;
relation?: {
id?: number;
typeName?: string;
};
}
interface TvdbCompany {
id: number;
name: string;
slug: string;
nameTranslations?: string[];
overviewTranslations?: string[];
aliases?: string[];
country: string;
primaryCompanyType: number;
activeDate: string;
inactiveDate?: string;
companyType: TvdbCompanyType;
parentCompany: TvdbParentCompany;
tagOptions?: string[];
}
interface TvdbType {
id: number;
name: string;
type: string;
alternateName?: string;
}
interface TvdbArtwork {
id: number;
image: string;
thumbnail: string;
language: string;
type: number;
score: number;
width: number;
height: number;
includesText: boolean;
}
export interface TvdbEpisode {
id: number;
seriesId: number;
name: string;
aired: string;
runtime: number;
nameTranslations: string[];
overview?: string;
overviewTranslations: string[];
image: string;
imageType: number;
isMovie: number;
seasons?: string[];
number: number;
absoluteNumber: number;
seasonNumber: number;
lastUpdated: string;
finaleType?: string;
year: string;
}
export interface TvdbSeasonDetails {
id: number;
seriesId: number;
type: TvdbType;
number: number;
nameTranslations: string[];
overviewTranslations: string[];
image: string;
imageType: number;
companies: {
studio: TvdbCompany[];
network: TvdbCompany[];
production: TvdbCompany[];
distributor: TvdbCompany[];
special_effects: TvdbCompany[];
};
lastUpdated: string;
year: string;
episodes: TvdbEpisode[];
trailers: string[];
artwork: TvdbArtwork[];
tagOptions?: string[];
firstAired: string;
}
export interface TvdbEpisodeTranslation {
name: string;
overview: string;
language: string;
}
const TMDB_TO_TVDB_MAPPING: Record<string, string> & {
[key in AvailableLocale]: string;
} = {
ar: 'ara', // Arabic
bg: 'bul', // Bulgarian
ca: 'cat', // Catalan
cs: 'ces', // Czech
da: 'dan', // Danish
de: 'deu', // German
el: 'ell', // Greek
en: 'eng', // English
es: 'spa', // Spanish
fi: 'fin', // Finnish
fr: 'fra', // French
he: 'heb', // Hebrew
hi: 'hin', // Hindi
hr: 'hrv', // Croatian
hu: 'hun', // Hungarian
it: 'ita', // Italian
ja: 'jpn', // Japanese
ko: 'kor', // Korean
lt: 'lit', // Lithuanian
nl: 'nld', // Dutch
pl: 'pol', // Polish
ro: 'ron', // Romanian
ru: 'rus', // Russian
sq: 'sqi', // Albanian
sr: 'srp', // Serbian
sv: 'swe', // Swedish
tr: 'tur', // Turkish
uk: 'ukr', // Ukrainian
'es-MX': 'spa', // Spanish (Latin America) -> Spanish
'nb-NO': 'nor', // Norwegian Bokmål -> Norwegian
'pt-BR': 'pt', // Portuguese (Brazil) -> Portuguese - Brazil (from TVDB data)
'pt-PT': 'por', // Portuguese (Portugal) -> Portuguese - Portugal (from TVDB data)
'zh-CN': 'zho', // Chinese (Simplified) -> Chinese - China
'zh-TW': 'zhtw', // Chinese (Traditional) -> Chinese - Taiwan
};
export function convertTMDBToTVDB(tmdbCode: string): string | null {
const normalizedCode = tmdbCode.toLowerCase();
return (
TMDB_TO_TVDB_MAPPING[tmdbCode] ||
TMDB_TO_TVDB_MAPPING[normalizedCode] ||
null
);
}
export function convertTmdbLanguageToTvdbWithFallback(
tmdbCode: string,
fallback: string
): string {
// First try exact match
const tvdbCode = convertTMDBToTVDB(tmdbCode);
if (tvdbCode) return tvdbCode;
return tvdbCode || fallback || 'eng'; // Default to English if no match found
}
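A few illustrative calls against the mapping above, with results inferred from the table (not part of the change):

// Direct hit, case-insensitive retry, and fallback behaviour.
convertTmdbLanguageToTvdbWithFallback('pt-BR', 'eng'); // 'pt'  (explicit mapping)
convertTMDBToTVDB('EN');                               // 'eng' (lookup retried with the lowercased code)
convertTmdbLanguageToTvdbWithFallback('xx', 'eng');    // 'eng' (no mapping, falls back)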

View File

@@ -9,6 +9,7 @@ import notificationManager from '@server/lib/notifications';
import DiscordAgent from '@server/lib/notifications/agents/discord'; import DiscordAgent from '@server/lib/notifications/agents/discord';
import EmailAgent from '@server/lib/notifications/agents/email'; import EmailAgent from '@server/lib/notifications/agents/email';
import GotifyAgent from '@server/lib/notifications/agents/gotify'; import GotifyAgent from '@server/lib/notifications/agents/gotify';
import LunaSeaAgent from '@server/lib/notifications/agents/lunasea';
import NtfyAgent from '@server/lib/notifications/agents/ntfy'; import NtfyAgent from '@server/lib/notifications/agents/ntfy';
import PushbulletAgent from '@server/lib/notifications/agents/pushbullet'; import PushbulletAgent from '@server/lib/notifications/agents/pushbullet';
import PushoverAgent from '@server/lib/notifications/agents/pushover'; import PushoverAgent from '@server/lib/notifications/agents/pushover';
@@ -25,10 +26,8 @@ import imageproxy from '@server/routes/imageproxy';
import { appDataPermissions } from '@server/utils/appDataVolume'; import { appDataPermissions } from '@server/utils/appDataVolume';
import { getAppVersion } from '@server/utils/appVersion'; import { getAppVersion } from '@server/utils/appVersion';
import createCustomProxyAgent from '@server/utils/customProxyAgent'; import createCustomProxyAgent from '@server/utils/customProxyAgent';
import { initializeDnsCache } from '@server/utils/dnsCache';
import restartFlag from '@server/utils/restartFlag'; import restartFlag from '@server/utils/restartFlag';
import { getClientIp } from '@supercharge/request-ip'; import { getClientIp } from '@supercharge/request-ip';
import axios from 'axios';
import { TypeormStore } from 'connect-typeorm/out'; import { TypeormStore } from 'connect-typeorm/out';
import cookieParser from 'cookie-parser'; import cookieParser from 'cookie-parser';
import type { NextFunction, Request, Response } from 'express'; import type { NextFunction, Request, Response } from 'express';
@@ -36,8 +35,6 @@ import express from 'express';
import * as OpenApiValidator from 'express-openapi-validator'; import * as OpenApiValidator from 'express-openapi-validator';
import type { Store } from 'express-session'; import type { Store } from 'express-session';
import session from 'express-session'; import session from 'express-session';
import http from 'http';
import https from 'https';
import next from 'next'; import next from 'next';
import path from 'path'; import path from 'path';
import swaggerUi from 'swagger-ui-express'; import swaggerUi from 'swagger-ui-express';
@@ -76,19 +73,6 @@ app
const settings = await getSettings().load(); const settings = await getSettings().load();
restartFlag.initializeSettings(settings); restartFlag.initializeSettings(settings);
if (settings.network.forceIpv4First) {
axios.defaults.httpAgent = new http.Agent({ family: 4 });
axios.defaults.httpsAgent = new https.Agent({ family: 4 });
}
// Add DNS caching
if (settings.network.dnsCache?.enabled) {
initializeDnsCache({
forceMinTtl: settings.network.dnsCache.forceMinTtl,
forceMaxTtl: settings.network.dnsCache.forceMaxTtl,
});
}
// Register HTTP proxy // Register HTTP proxy
if (settings.network.proxy.enabled) { if (settings.network.proxy.enabled) {
await createCustomProxyAgent(settings.network.proxy); await createCustomProxyAgent(settings.network.proxy);
@@ -121,6 +105,7 @@ app
new EmailAgent(), new EmailAgent(),
new GotifyAgent(), new GotifyAgent(),
new NtfyAgent(), new NtfyAgent(),
new LunaSeaAgent(),
new PushbulletAgent(), new PushbulletAgent(),
new PushoverAgent(), new PushoverAgent(),
new SlackAgent(), new SlackAgent(),

View File

@@ -1,4 +1,3 @@
import type { DnsEntries, DnsStats } from 'dns-caching';
import type { PaginatedResponse } from './common'; import type { PaginatedResponse } from './common';
export type LogMessage = { export type LogMessage = {
@@ -65,10 +64,6 @@ export interface CacheItem {
export interface CacheResponse { export interface CacheResponse {
apiCaches: CacheItem[]; apiCaches: CacheItem[];
imageCache: Record<'tmdb' | 'avatar', { size: number; imageCount: number }>; imageCache: Record<'tmdb' | 'avatar', { size: number; imageCount: number }>;
dnsCache: {
stats: DnsStats | undefined;
entries: DnsEntries | undefined;
};
} }
export interface StatusResponse { export interface StatusResponse {

View File

@@ -72,7 +72,6 @@ class BlacklistedTagProcessor implements RunnableScanner<StatusBase> {
const blacklistedTagsArr = blacklistedTags.split(','); const blacklistedTagsArr = blacklistedTags.split(',');
const pageLimit = settings.main.blacklistedTagsLimit; const pageLimit = settings.main.blacklistedTagsLimit;
const invalidKeywords = new Set<string>();
if (blacklistedTags.length === 0) { if (blacklistedTags.length === 0) {
return; return;
@@ -88,19 +87,6 @@ class BlacklistedTagProcessor implements RunnableScanner<StatusBase> {
// Iterate for each tag // Iterate for each tag
for (const tag of blacklistedTagsArr) { for (const tag of blacklistedTagsArr) {
const keywordDetails = await tmdb.getKeywordDetails({
keywordId: Number(tag),
});
if (keywordDetails === null) {
logger.warn('Skipping invalid keyword in blacklisted tags', {
label: 'Blacklisted Tags Processor',
keywordId: tag,
});
invalidKeywords.add(tag);
continue;
}
let queryMax = pageLimit * SortOptionsIterable.length; let queryMax = pageLimit * SortOptionsIterable.length;
let fixedSortMode = false; // Set to true when the page limit allows for getting every page of tag let fixedSortMode = false; // Set to true when the page limit allows for getting every page of tag
@@ -116,51 +102,24 @@ class BlacklistedTagProcessor implements RunnableScanner<StatusBase> {
throw new AbortTransaction(); throw new AbortTransaction();
} }
try { const response = await getDiscover({
const response = await getDiscover({ page,
page, sortBy,
sortBy, keywords: tag,
keywords: tag, });
}); await this.processResults(response, tag, type, em);
await new Promise((res) => setTimeout(res, TMDB_API_DELAY_MS));
await this.processResults(response, tag, type, em); this.progress++;
await new Promise((res) => setTimeout(res, TMDB_API_DELAY_MS)); if (page === 1 && response.total_pages <= queryMax) {
// We will finish the tag with less queries than expected, move progress accordingly
this.progress++; this.progress += queryMax - response.total_pages;
if (page === 1 && response.total_pages <= queryMax) { fixedSortMode = true;
// We will finish the tag with less queries than expected, move progress accordingly queryMax = response.total_pages;
this.progress += queryMax - response.total_pages;
fixedSortMode = true;
queryMax = response.total_pages;
}
} catch (error) {
logger.error('Error processing keyword in blacklisted tags', {
label: 'Blacklisted Tags Processor',
keywordId: tag,
errorMessage: error.message,
});
} }
} }
} }
} }
if (invalidKeywords.size > 0) {
const currentTags = blacklistedTagsArr.filter(
(tag) => !invalidKeywords.has(tag)
);
const cleanedTags = currentTags.join(',');
if (cleanedTags !== blacklistedTags) {
settings.main.blacklistedTags = cleanedTags;
await settings.save();
logger.info('Cleaned up invalid keywords from settings', {
label: 'Blacklisted Tags Processor',
removedKeywords: Array.from(invalidKeywords),
newBlacklistedTags: cleanedTags,
});
}
}
} }
private async processResults( private async processResults(

View File

@@ -9,8 +9,7 @@ export type AvailableCacheIds =
| 'github' | 'github'
| 'plexguid' | 'plexguid'
| 'plextv' | 'plextv'
| 'plexwatchlist' | 'plexwatchlist';
| 'tvdb';
const DEFAULT_TTL = 300; const DEFAULT_TTL = 300;
const DEFAULT_CHECK_PERIOD = 120; const DEFAULT_CHECK_PERIOD = 120;
@@ -71,10 +70,6 @@ class CacheManager {
checkPeriod: 60, checkPeriod: 60,
}), }),
plexwatchlist: new Cache('plexwatchlist', 'Plex Watchlist'), plexwatchlist: new Cache('plexwatchlist', 'Plex Watchlist'),
tvdb: new Cache('tvdb', 'The TVDB API', {
stdTtl: 21600,
checkPeriod: 60 * 30,
}),
}; };
public getCache(id: AvailableCacheIds): Cache { public getCache(id: AvailableCacheIds): Cache {

View File

@@ -1,5 +1,4 @@
import logger from '@server/logger'; import logger from '@server/logger';
import { requestInterceptorFunction } from '@server/utils/customProxyAgent';
import axios from 'axios'; import axios from 'axios';
import rateLimit, { type rateLimitOptions } from 'axios-rate-limit'; import rateLimit, { type rateLimitOptions } from 'axios-rate-limit';
import { createHash } from 'crypto'; import { createHash } from 'crypto';
@@ -151,7 +150,6 @@ class ImageProxy {
baseURL: baseUrl, baseURL: baseUrl,
headers: options.headers, headers: options.headers,
}); });
this.axios.interceptors.request.use(requestInterceptorFunction);
if (options.rateLimitOptions) { if (options.rateLimitOptions) {
this.axios = rateLimit(this.axios, options.rateLimitOptions); this.axios = rateLimit(this.axios, options.rateLimitOptions);

View File

@@ -109,9 +109,7 @@ class DiscordAgent
type: Notification, type: Notification,
payload: NotificationPayload payload: NotificationPayload
): DiscordRichEmbed { ): DiscordRichEmbed {
const settings = getSettings(); const { applicationUrl } = getSettings().main;
const { applicationUrl } = settings.main;
const { embedPoster } = settings.notifications.agents.discord;
const appUrl = const appUrl =
applicationUrl || `http://localhost:${process.env.port || 5055}`; applicationUrl || `http://localhost:${process.env.port || 5055}`;
@@ -225,11 +223,9 @@ class DiscordAgent
} }
: undefined, : undefined,
fields, fields,
thumbnail: embedPoster thumbnail: {
? { url: payload.image,
url: payload.image, },
}
: undefined,
}; };
} }

View File

@@ -48,9 +48,7 @@ class EmailAgent
recipientEmail: string, recipientEmail: string,
recipientName?: string recipientName?: string
): EmailOptions | undefined { ): EmailOptions | undefined {
const settings = getSettings(); const { applicationUrl, applicationTitle } = getSettings().main;
const { applicationUrl, applicationTitle } = settings.main;
const { embedPoster } = settings.notifications.agents.email;
if (type === Notification.TEST_NOTIFICATION) { if (type === Notification.TEST_NOTIFICATION) {
return { return {
@@ -131,7 +129,7 @@ class EmailAgent
body, body,
mediaName: payload.subject, mediaName: payload.subject,
mediaExtra: payload.extra ?? [], mediaExtra: payload.extra ?? [],
imageUrl: embedPoster ? payload.image : undefined, imageUrl: payload.image,
timestamp: new Date().toTimeString(), timestamp: new Date().toTimeString(),
requestedBy: payload.request.requestedBy.displayName, requestedBy: payload.request.requestedBy.displayName,
actionUrl: applicationUrl actionUrl: applicationUrl
@@ -178,7 +176,7 @@ class EmailAgent
issueComment: payload.comment?.message, issueComment: payload.comment?.message,
mediaName: payload.subject, mediaName: payload.subject,
extra: payload.extra ?? [], extra: payload.extra ?? [],
imageUrl: embedPoster ? payload.image : undefined, imageUrl: payload.image,
timestamp: new Date().toTimeString(), timestamp: new Date().toTimeString(),
actionUrl: applicationUrl actionUrl: applicationUrl
? `${applicationUrl}/issues/${payload.issue.id}` ? `${applicationUrl}/issues/${payload.issue.id}`

View File

@@ -35,7 +35,7 @@ class GotifyAgent
settings.enabled && settings.enabled &&
settings.options.url && settings.options.url &&
settings.options.token && settings.options.token &&
settings.options.priority !== undefined settings.options.priority
) { ) {
return true; return true;
} }
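The check above matters when the configured Gotify priority is 0, which is falsy in JavaScript; a one-line illustration of the difference:

// With priority 0, a plain truthiness check would disable the agent; the explicit undefined check keeps it enabled.
const priority = 0;
Boolean(priority);      // false
priority !== undefined; // true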

View File

@@ -0,0 +1,133 @@
import { IssueStatus, IssueType } from '@server/constants/issue';
import { MediaStatus } from '@server/constants/media';
import type { NotificationAgentLunaSea } from '@server/lib/settings';
import { getSettings } from '@server/lib/settings';
import logger from '@server/logger';
import axios from 'axios';
import { hasNotificationType, Notification } from '..';
import type { NotificationAgent, NotificationPayload } from './agent';
import { BaseAgent } from './agent';
class LunaSeaAgent
extends BaseAgent<NotificationAgentLunaSea>
implements NotificationAgent
{
protected getSettings(): NotificationAgentLunaSea {
if (this.settings) {
return this.settings;
}
const settings = getSettings();
return settings.notifications.agents.lunasea;
}
private buildPayload(type: Notification, payload: NotificationPayload) {
return {
notification_type: Notification[type],
event: payload.event,
subject: payload.subject,
message: payload.message,
image: payload.image ?? null,
email: payload.notifyUser?.email,
username: payload.notifyUser?.displayName,
avatar: payload.notifyUser?.avatar,
media: payload.media
? {
media_type: payload.media.mediaType,
tmdbId: payload.media.tmdbId,
tvdbId: payload.media.tvdbId,
status: MediaStatus[payload.media.status],
status4k: MediaStatus[payload.media.status4k],
}
: null,
extra: payload.extra ?? [],
request: payload.request
? {
request_id: payload.request.id,
requestedBy_email: payload.request.requestedBy.email,
requestedBy_username: payload.request.requestedBy.displayName,
requestedBy_avatar: payload.request.requestedBy.avatar,
}
: null,
issue: payload.issue
? {
issue_id: payload.issue.id,
issue_type: IssueType[payload.issue.issueType],
issue_status: IssueStatus[payload.issue.status],
createdBy_email: payload.issue.createdBy.email,
createdBy_username: payload.issue.createdBy.displayName,
createdBy_avatar: payload.issue.createdBy.avatar,
}
: null,
comment: payload.comment
? {
comment_message: payload.comment.message,
commentedBy_email: payload.comment.user.email,
commentedBy_username: payload.comment.user.displayName,
commentedBy_avatar: payload.comment.user.avatar,
}
: null,
};
}
public shouldSend(): boolean {
const settings = this.getSettings();
if (settings.enabled && settings.options.webhookUrl) {
return true;
}
return false;
}
public async send(
type: Notification,
payload: NotificationPayload
): Promise<boolean> {
const settings = this.getSettings();
if (
!payload.notifySystem ||
!hasNotificationType(type, settings.types ?? 0)
) {
return true;
}
logger.debug('Sending LunaSea notification', {
label: 'Notifications',
type: Notification[type],
subject: payload.subject,
});
try {
await axios.post(
settings.options.webhookUrl,
this.buildPayload(type, payload),
settings.options.profileName
? {
headers: {
Authorization: `Basic ${Buffer.from(
`${settings.options.profileName}:`
).toString('base64')}`,
},
}
: undefined
);
return true;
} catch (e) {
logger.error('Error sending LunaSea notification', {
label: 'Notifications',
type: Notification[type],
subject: payload.subject,
errorMessage: e.message,
response: e?.response?.data,
});
return false;
}
}
}
export default LunaSeaAgent;
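When a profile name is set, the agent above authenticates with HTTP Basic credentials built from the profile name and an empty password. A sketch of the resulting header, using a hypothetical profile name:

// profileName 'default' produces: Authorization: Basic ZGVmYXVsdDo=
const authHeader = `Basic ${Buffer.from('default:').toString('base64')}`;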

View File

@@ -22,9 +22,7 @@ class NtfyAgent
} }
private buildPayload(type: Notification, payload: NotificationPayload) { private buildPayload(type: Notification, payload: NotificationPayload) {
const settings = getSettings(); const { applicationUrl } = getSettings().main;
const { applicationUrl } = settings.main;
const { embedPoster } = settings.notifications.agents.ntfy;
const topic = this.getSettings().options.topic; const topic = this.getSettings().options.topic;
const priority = 3; const priority = 3;
@@ -74,7 +72,7 @@ class NtfyAgent
message += `\n\n**${extra.name}**\n${extra.value}`; message += `\n\n**${extra.name}**\n${extra.value}`;
} }
const attach = embedPoster ? payload.image : undefined; const attach = payload.image;
let click; let click;
if (applicationUrl && payload.media) { if (applicationUrl && payload.media) {

View File

@@ -78,9 +78,7 @@ class PushoverAgent
type: Notification, type: Notification,
payload: NotificationPayload payload: NotificationPayload
): Promise<Partial<PushoverPayload>> { ): Promise<Partial<PushoverPayload>> {
const settings = getSettings(); const { applicationUrl, applicationTitle } = getSettings().main;
const { applicationUrl, applicationTitle } = settings.main;
const { embedPoster } = settings.notifications.agents.pushover;
const title = payload.event ?? payload.subject; const title = payload.event ?? payload.subject;
let message = payload.event ? `<b>${payload.subject}</b>` : ''; let message = payload.event ? `<b>${payload.subject}</b>` : '';
@@ -157,7 +155,7 @@ class PushoverAgent
let attachment_base64; let attachment_base64;
let attachment_type; let attachment_type;
if (embedPoster && payload.image) { if (payload.image) {
const imagePayload = await this.getImagePayload(payload.image); const imagePayload = await this.getImagePayload(payload.image);
if (imagePayload.attachment_base64 && imagePayload.attachment_type) { if (imagePayload.attachment_base64 && imagePayload.attachment_type) {
attachment_base64 = imagePayload.attachment_base64; attachment_base64 = imagePayload.attachment_base64;

View File

@@ -63,9 +63,7 @@ class SlackAgent
type: Notification, type: Notification,
payload: NotificationPayload payload: NotificationPayload
): SlackBlockEmbed { ): SlackBlockEmbed {
const settings = getSettings(); const { applicationUrl, applicationTitle } = getSettings().main;
const { applicationUrl, applicationTitle } = settings.main;
const { embedPoster } = settings.notifications.agents.slack;
const fields: EmbedField[] = []; const fields: EmbedField[] = [];
@@ -161,14 +159,13 @@ class SlackAgent
type: 'mrkdwn', type: 'mrkdwn',
text: payload.message, text: payload.message,
}, },
accessory: accessory: payload.image
embedPoster && payload.image ? {
? { type: 'image',
type: 'image', image_url: payload.image,
image_url: payload.image, alt_text: payload.subject,
alt_text: payload.subject, }
} : undefined,
: undefined,
}); });
} }

View File

@@ -65,9 +65,7 @@ class TelegramAgent
type: Notification, type: Notification,
payload: NotificationPayload payload: NotificationPayload
): Partial<TelegramMessagePayload | TelegramPhotoPayload> { ): Partial<TelegramMessagePayload | TelegramPhotoPayload> {
const settings = getSettings(); const { applicationUrl, applicationTitle } = getSettings().main;
const { applicationUrl, applicationTitle } = settings.main;
const { embedPoster } = settings.notifications.agents.telegram;
/* eslint-disable no-useless-escape */ /* eslint-disable no-useless-escape */
let message = `\*${this.escapeText( let message = `\*${this.escapeText(
@@ -144,7 +142,7 @@ class TelegramAgent
} }
/* eslint-enable */ /* eslint-enable */
return embedPoster && payload.image return payload.image
? { ? {
photo: payload.image, photo: payload.image,
caption: message, caption: message,
@@ -162,7 +160,7 @@ class TelegramAgent
): Promise<boolean> { ): Promise<boolean> {
const settings = this.getSettings(); const settings = this.getSettings();
const endpoint = `${this.baseUrl}bot${settings.options.botAPI}/${ const endpoint = `${this.baseUrl}bot${settings.options.botAPI}/${
settings.embedPoster && payload.image ? 'sendPhoto' : 'sendMessage' payload.image ? 'sendPhoto' : 'sendMessage'
}`; }`;
const notificationPayload = this.getNotificationPayload(type, payload); const notificationPayload = this.getNotificationPayload(type, payload);

View File

@@ -177,27 +177,9 @@ class WebhookAgent
subject: payload.subject, subject: payload.subject,
}); });
let webhookUrl = settings.options.webhookUrl;
if (settings.options.supportVariables) {
Object.keys(KeyMap).forEach((keymapKey) => {
const keymapValue = KeyMap[keymapKey as keyof typeof KeyMap];
const variableValue =
type === Notification.TEST_NOTIFICATION
? 'test'
: typeof keymapValue === 'function'
? keymapValue(payload, type)
: get(payload, keymapValue) || 'test';
webhookUrl = webhookUrl.replace(
new RegExp(`{{${keymapKey}}}`, 'g'),
encodeURIComponent(variableValue)
);
});
}
try { try {
await axios.post( await axios.post(
webhookUrl, settings.options.webhookUrl,
this.buildPayload(type, payload), this.buildPayload(type, payload),
settings.options.authHeader settings.options.authHeader
? { ? {

View File

@@ -42,8 +42,6 @@ class WebPushAgent
type: Notification, type: Notification,
payload: NotificationPayload payload: NotificationPayload
): PushNotificationPayload { ): PushNotificationPayload {
const { embedPoster } = getSettings().notifications.agents.webpush;
const mediaType = payload.media const mediaType = payload.media
? payload.media.mediaType === MediaType.MOVIE ? payload.media.mediaType === MediaType.MOVIE
? 'movie' ? 'movie'
@@ -130,7 +128,7 @@ class WebPushAgent
notificationType: Notification[type], notificationType: Notification[type],
subject: payload.subject, subject: payload.subject,
message, message,
image: embedPoster ? payload.image : undefined, image: payload.image,
requestId: payload.request?.id, requestId: payload.request?.id,
actionUrl, actionUrl,
actionUrlTitle, actionUrlTitle,

View File

@@ -1,13 +1,7 @@
import animeList from '@server/api/animelist';
import type { JellyfinLibraryItem } from '@server/api/jellyfin'; import type { JellyfinLibraryItem } from '@server/api/jellyfin';
import JellyfinAPI from '@server/api/jellyfin'; import JellyfinAPI from '@server/api/jellyfin';
import { getMetadataProvider } from '@server/api/metadata';
import TheMovieDb from '@server/api/themoviedb'; import TheMovieDb from '@server/api/themoviedb';
import { ANIME_KEYWORD_ID } from '@server/api/themoviedb/constants'; import type { TmdbTvDetails } from '@server/api/themoviedb/interfaces';
import type {
TmdbKeyword,
TmdbTvDetails,
} from '@server/api/themoviedb/interfaces';
import { MediaStatus, MediaType } from '@server/constants/media'; import { MediaStatus, MediaType } from '@server/constants/media';
import { MediaServerType } from '@server/constants/server'; import { MediaServerType } from '@server/constants/server';
import { getRepository } from '@server/datasource'; import { getRepository } from '@server/datasource';
@@ -46,11 +40,9 @@ class JellyfinScanner {
private enable4kMovie = false; private enable4kMovie = false;
private enable4kShow = false; private enable4kShow = false;
private asyncLock = new AsyncLock(); private asyncLock = new AsyncLock();
private processedAnidbSeason: Map<number, Map<number, number>>;
constructor({ isRecentOnly }: { isRecentOnly?: boolean } = {}) { constructor({ isRecentOnly }: { isRecentOnly?: boolean } = {}) {
this.tmdb = new TheMovieDb(); this.tmdb = new TheMovieDb();
this.isRecentOnly = isRecentOnly ?? false; this.isRecentOnly = isRecentOnly ?? false;
} }
@@ -68,7 +60,7 @@ class JellyfinScanner {
const mediaRepository = getRepository(Media); const mediaRepository = getRepository(Media);
try { try {
let metadata = await this.jfClient.getItemData(jellyfinitem.Id); const metadata = await this.jfClient.getItemData(jellyfinitem.Id);
const newMedia = new Media(); const newMedia = new Media();
if (!metadata?.Id) { if (!metadata?.Id) {
@@ -79,18 +71,8 @@ class JellyfinScanner {
return; return;
} }
const anidbId = Number(metadata.ProviderIds.AniDB ?? null);
newMedia.tmdbId = Number(metadata.ProviderIds.Tmdb ?? null); newMedia.tmdbId = Number(metadata.ProviderIds.Tmdb ?? null);
newMedia.imdbId = metadata.ProviderIds.Imdb; newMedia.imdbId = metadata.ProviderIds.Imdb;
// We use anidb only if we have the anidbId and nothing else
if (anidbId && !newMedia.imdbId && !newMedia.tmdbId) {
const result = animeList.getFromAnidbId(anidbId);
newMedia.tmdbId = Number(result?.tmdbId ?? null);
newMedia.imdbId = result?.imdbId;
}
if (newMedia.imdbId && !isNaN(newMedia.tmdbId)) { if (newMedia.imdbId && !isNaN(newMedia.tmdbId)) {
const tmdbMovie = await this.tmdb.getMediaByImdbId({ const tmdbMovie = await this.tmdb.getMediaByImdbId({
imdbId: newMedia.imdbId, imdbId: newMedia.imdbId,
@@ -101,40 +83,6 @@ class JellyfinScanner {
throw new Error('Unable to find TMDb ID'); throw new Error('Unable to find TMDb ID');
} }
// With AniDB we can have mixed libraries with movies in a "show" library
// We take the first episode of the first season (the movie) and use it to
// get more information, like the MediaSource
if (anidbId && metadata.Type === 'Series') {
const season = (await this.jfClient.getSeasons(jellyfinitem.Id)).find(
(md) => {
return md.IndexNumber === 1;
}
);
if (!season) {
this.log('No season found for anidb movie', 'debug', {
jellyfinitem,
});
return;
}
const episodes = await this.jfClient.getEpisodes(
jellyfinitem.Id,
season.Id
);
if (!episodes[0]) {
this.log('No episode found for anidb movie', 'debug', {
jellyfinitem,
});
return;
}
metadata = await this.jfClient.getItemData(episodes[0].Id);
if (!metadata) {
this.log('No metadata found for anidb movie', 'debug', {
jellyfinitem,
});
return;
}
}
const has4k = metadata.MediaSources?.some((MediaSource) => { const has4k = metadata.MediaSources?.some((MediaSource) => {
return MediaSource.MediaStreams.filter( return MediaSource.MediaStreams.filter(
(MediaStream) => MediaStream.Type === 'Video' (MediaStream) => MediaStream.Type === 'Video'
@@ -152,12 +100,6 @@ class JellyfinScanner {
}); });
await this.asyncLock.dispatch(newMedia.tmdbId, async () => { await this.asyncLock.dispatch(newMedia.tmdbId, async () => {
if (!metadata) {
// this will never execute, but typescript thinks somebody could reset tvShow from
// outer scope back to null before this async gets called
return;
}
const existing = await this.getExisting( const existing = await this.getExisting(
newMedia.tmdbId, newMedia.tmdbId,
MediaType.MOVIE MediaType.MOVIE
@@ -250,42 +192,6 @@ class JellyfinScanner {
} }
} }
private async getTvShow({
tmdbId,
tvdbId,
}: {
tmdbId?: number;
tvdbId?: number;
}): Promise<TmdbTvDetails> {
let tvShow;
if (tmdbId) {
tvShow = await this.tmdb.getTvShow({
tvId: Number(tmdbId),
});
} else if (tvdbId) {
tvShow = await this.tmdb.getShowByTvdbId({
tvdbId: Number(tvdbId),
});
} else {
throw new Error('No ID provided');
}
const metadataProvider = tvShow.keywords.results.some(
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
)
? await getMetadataProvider('anime')
: await getMetadataProvider('tv');
if (!(metadataProvider instanceof TheMovieDb)) {
tvShow = await metadataProvider.getTvShow({
tvId: Number(tmdbId),
});
}
return tvShow;
}
private async processShow(jellyfinitem: JellyfinLibraryItem) { private async processShow(jellyfinitem: JellyfinLibraryItem) {
const mediaRepository = getRepository(Media); const mediaRepository = getRepository(Media);
@@ -306,8 +212,8 @@ class JellyfinScanner {
if (metadata.ProviderIds.Tmdb) { if (metadata.ProviderIds.Tmdb) {
try { try {
tvShow = await this.getTvShow({ tvShow = await this.tmdb.getTvShow({
tmdbId: Number(metadata.ProviderIds.Tmdb), tvId: Number(metadata.ProviderIds.Tmdb),
}); });
} catch { } catch {
this.log('Unable to find TMDb ID for this title.', 'debug', { this.log('Unable to find TMDb ID for this title.', 'debug', {
@@ -317,7 +223,7 @@ class JellyfinScanner {
} }
if (!tvShow && metadata.ProviderIds.Tvdb) { if (!tvShow && metadata.ProviderIds.Tvdb) {
try { try {
tvShow = await this.getTvShow({ tvShow = await this.tmdb.getShowByTvdbId({
tvdbId: Number(metadata.ProviderIds.Tvdb), tvdbId: Number(metadata.ProviderIds.Tvdb),
}); });
} catch { } catch {
@@ -326,28 +232,6 @@ class JellyfinScanner {
}); });
} }
} }
let tvdbSeasonFromAnidb: number | undefined;
if (!tvShow && metadata.ProviderIds.AniDB) {
const anidbId = Number(metadata.ProviderIds.AniDB);
const result = animeList.getFromAnidbId(anidbId);
tvdbSeasonFromAnidb = result?.tvdbSeason;
if (result?.tvdbId) {
try {
tvShow = await this.tmdb.getShowByTvdbId({
tvdbId: result.tvdbId,
});
} catch {
this.log('Unable to find AniDB ID for this title.', 'debug', {
jellyfinitem,
});
}
}
// With AniDB we can have mixed libraries with movies in a "show" library
else if (result?.imdbId || result?.tmdbId) {
await this.processMovie(jellyfinitem);
return;
}
}
if (tvShow) { if (tvShow) {
await this.asyncLock.dispatch(tvShow.id, async () => { await this.asyncLock.dispatch(tvShow.id, async () => {
@@ -376,20 +260,9 @@ class JellyfinScanner {
for (const season of seasons) { for (const season of seasons) {
const JellyfinSeasons = await this.jfClient.getSeasons(Id); const JellyfinSeasons = await this.jfClient.getSeasons(Id);
const matchedJellyfinSeason = JellyfinSeasons.find((md) => { const matchedJellyfinSeason = JellyfinSeasons.find(
if (tvdbSeasonFromAnidb) { (md) => Number(md.IndexNumber) === season.season_number
// In AniDB we don't have the concept of seasons, );
// we have multiple shows with only Season 1 (and sometimes a season with index 0 for specials).
// We use tvdbSeasonFromAnidb to check if we are on the correct TMDB season and
// md.IndexNumber === 1 to be sure to find the correct season on jellyfin
return (
tvdbSeasonFromAnidb === season.season_number &&
md.IndexNumber === 1
);
} else {
return Number(md.IndexNumber) === season.season_number;
}
});
const existingSeason = media?.seasons.find( const existingSeason = media?.seasons.find(
(es) => es.seasonNumber === season.season_number (es) => es.seasonNumber === season.season_number
@@ -442,29 +315,6 @@ class JellyfinScanner {
} }
} }
// With AniDB we can have multiple shows for one season, so we need to save
// the episode from all the jellyfin entries to get the total
if (tvdbSeasonFromAnidb) {
if (this.processedAnidbSeason.has(tvShow.id)) {
const show = this.processedAnidbSeason.get(tvShow.id)!;
if (show.has(season.season_number)) {
show.set(
season.season_number,
show.get(season.season_number)! + totalStandard
);
totalStandard = show.get(season.season_number)!;
} else {
show.set(season.season_number, totalStandard);
}
} else {
this.processedAnidbSeason.set(
tvShow.id,
new Map([[season.season_number, totalStandard]])
);
}
}
if ( if (
media && media &&
(totalStandard > 0 || (total4k > 0 && !this.enable4kShow)) && (totalStandard > 0 || (total4k > 0 && !this.enable4kShow)) &&
@@ -677,7 +527,6 @@ class JellyfinScanner {
} }
private async processItems(slicedItems: JellyfinLibraryItem[]) { private async processItems(slicedItems: JellyfinLibraryItem[]) {
this.processedAnidbSeason = new Map();
await Promise.all( await Promise.all(
slicedItems.map(async (item) => { slicedItems.map(async (item) => {
if (item.Type === 'Movie') { if (item.Type === 'Movie') {
@@ -775,8 +624,6 @@ class JellyfinScanner {
(library) => library.enabled (library) => library.enabled
); );
await animeList.sync();
this.enable4kMovie = settings.radarr.some((radarr) => radarr.is4k); this.enable4kMovie = settings.radarr.some((radarr) => radarr.is4k);
if (this.enable4kMovie) { if (this.enable4kMovie) {
this.log( this.log(
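
The AniDB handling shown on one side of this scanner diff maps an AniDB ID to TMDB/IMDB IDs through the bundled anime list when no other provider ID is present on the item. A minimal TypeScript sketch of that fallback, assuming a lookup helper shaped like the `animeList.getFromAnidbId()` result in the hunk above (the interface below is illustrative, not the real type):

// Hypothetical shape of the anime-list lookup result (based on the diff above).
interface AnidbMapping {
  tmdbId?: number;
  imdbId?: string;
  tvdbId?: number;
  tvdbSeason?: number;
}

// Resolve TMDB/IMDB IDs from provider IDs, falling back to AniDB only when
// no other provider ID is available (mirrors the logic on one side of the hunk).
function resolveIds(
  providerIds: { Tmdb?: string; Imdb?: string; AniDB?: string },
  lookup: (anidbId: number) => AnidbMapping | undefined
): { tmdbId?: number; imdbId?: string } {
  let tmdbId = providerIds.Tmdb ? Number(providerIds.Tmdb) : undefined;
  let imdbId = providerIds.Imdb;

  const anidbId = providerIds.AniDB ? Number(providerIds.AniDB) : undefined;
  if (anidbId && tmdbId === undefined && imdbId === undefined) {
    const mapped = lookup(anidbId);
    tmdbId = mapped?.tmdbId;
    imdbId = mapped?.imdbId;
  }

  return { tmdbId, imdbId };
}
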
View File
@@ -1,13 +1,7 @@
import animeList from '@server/api/animelist'; import animeList from '@server/api/animelist';
import { getMetadataProvider } from '@server/api/metadata';
import type { PlexLibraryItem, PlexMetadata } from '@server/api/plexapi'; import type { PlexLibraryItem, PlexMetadata } from '@server/api/plexapi';
import PlexAPI from '@server/api/plexapi'; import PlexAPI from '@server/api/plexapi';
import TheMovieDb from '@server/api/themoviedb'; import type { TmdbTvDetails } from '@server/api/themoviedb/interfaces';
import { ANIME_KEYWORD_ID } from '@server/api/themoviedb/constants';
import type {
TmdbKeyword,
TmdbTvDetails,
} from '@server/api/themoviedb/interfaces';
import { getRepository } from '@server/datasource'; import { getRepository } from '@server/datasource';
import { User } from '@server/entity/User'; import { User } from '@server/entity/User';
import cacheManager from '@server/lib/cache'; import cacheManager from '@server/lib/cache';
@@ -255,42 +249,6 @@ class PlexScanner
}); });
} }
private async getTvShow({
tmdbId,
tvdbId,
}: {
tmdbId?: number;
tvdbId?: number;
}): Promise<TmdbTvDetails> {
let tvShow;
if (tmdbId) {
tvShow = await this.tmdb.getTvShow({
tvId: Number(tmdbId),
});
} else if (tvdbId) {
tvShow = await this.tmdb.getShowByTvdbId({
tvdbId: Number(tvdbId),
});
} else {
throw new Error('No ID provided');
}
const metadataProvider = tvShow.keywords.results.some(
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
)
? await getMetadataProvider('anime')
: await getMetadataProvider('tv');
if (!(metadataProvider instanceof TheMovieDb)) {
tvShow = await metadataProvider.getTvShow({
tvId: Number(tmdbId),
});
}
return tvShow;
}
private async processPlexShow(plexitem: PlexLibraryItem) { private async processPlexShow(plexitem: PlexLibraryItem) {
const ratingKey = const ratingKey =
plexitem.grandparentRatingKey ?? plexitem.grandparentRatingKey ??
@@ -315,9 +273,7 @@ class PlexScanner
await this.processHamaSpecials(metadata, mediaIds.tvdbId); await this.processHamaSpecials(metadata, mediaIds.tvdbId);
} }
const tvShow = await this.getTvShow({ const tvShow = await this.tmdb.getTvShow({ tvId: mediaIds.tmdbId });
tmdbId: mediaIds.tmdbId,
});
const seasons = tvShow.seasons; const seasons = tvShow.seasons;
const processableSeasons: ProcessableSeason[] = []; const processableSeasons: ProcessableSeason[] = [];
View File
@@ -100,16 +100,6 @@ interface Quota {
quotaDays?: number; quotaDays?: number;
} }
export enum MetadataProviderType {
TMDB = 'tmdb',
TVDB = 'tvdb',
}
export interface MetadataSettings {
tv: MetadataProviderType;
anime: MetadataProviderType;
}
export interface ProxySettings { export interface ProxySettings {
enabled: boolean; enabled: boolean;
hostname: string; hostname: string;
@@ -148,29 +138,10 @@ export interface MainSettings {
youtubeUrl: string; youtubeUrl: string;
} }
export interface ProxySettings {
enabled: boolean;
hostname: string;
port: number;
useSsl: boolean;
user: string;
password: string;
bypassFilter: string;
bypassLocalAddresses: boolean;
}
export interface DnsCacheSettings {
enabled: boolean;
forceMinTtl?: number;
forceMaxTtl?: number;
}
export interface NetworkSettings { export interface NetworkSettings {
csrfProtection: boolean; csrfProtection: boolean;
forceIpv4First: boolean;
trustProxy: boolean; trustProxy: boolean;
proxy: ProxySettings; proxy: ProxySettings;
dnsCache: DnsCacheSettings;
} }
interface PublicSettings { interface PublicSettings {
@@ -207,7 +178,6 @@ interface FullPublicSettings extends PublicSettings {
export interface NotificationAgentConfig { export interface NotificationAgentConfig {
enabled: boolean; enabled: boolean;
embedPoster: boolean;
types?: number; types?: number;
options: Record<string, unknown>; options: Record<string, unknown>;
} }
@@ -245,6 +215,13 @@ export interface NotificationAgentEmail extends NotificationAgentConfig {
}; };
} }
export interface NotificationAgentLunaSea extends NotificationAgentConfig {
options: {
webhookUrl: string;
profileName?: string;
};
}
export interface NotificationAgentTelegram extends NotificationAgentConfig { export interface NotificationAgentTelegram extends NotificationAgentConfig {
options: { options: {
botUsername?: string; botUsername?: string;
@@ -275,7 +252,6 @@ export interface NotificationAgentWebhook extends NotificationAgentConfig {
webhookUrl: string; webhookUrl: string;
jsonPayload: string; jsonPayload: string;
authHeader?: string; authHeader?: string;
supportVariables?: boolean;
}; };
} }
@@ -317,6 +293,7 @@ interface NotificationAgents {
email: NotificationAgentEmail; email: NotificationAgentEmail;
gotify: NotificationAgentGotify; gotify: NotificationAgentGotify;
ntfy: NotificationAgentNtfy; ntfy: NotificationAgentNtfy;
lunasea: NotificationAgentLunaSea;
pushbullet: NotificationAgentPushbullet; pushbullet: NotificationAgentPushbullet;
pushover: NotificationAgentPushover; pushover: NotificationAgentPushover;
slack: NotificationAgentSlack; slack: NotificationAgentSlack;
@@ -362,8 +339,6 @@ export interface AllSettings {
notifications: NotificationSettings; notifications: NotificationSettings;
jobs: Record<JobId, JobSettings>; jobs: Record<JobId, JobSettings>;
network: NetworkSettings; network: NetworkSettings;
metadataSettings: MetadataSettings;
migrations: string[];
} }
const SETTINGS_PATH = process.env.CONFIG_DIRECTORY const SETTINGS_PATH = process.env.CONFIG_DIRECTORY
@@ -424,10 +399,6 @@ class Settings {
apiKey: '', apiKey: '',
}, },
tautulli: {}, tautulli: {},
metadataSettings: {
tv: MetadataProviderType.TMDB,
anime: MetadataProviderType.TMDB,
},
radarr: [], radarr: [],
sonarr: [], sonarr: [],
public: { public: {
@@ -437,7 +408,6 @@ class Settings {
agents: { agents: {
email: { email: {
enabled: false, enabled: false,
embedPoster: true,
options: { options: {
userEmailRequired: false, userEmailRequired: false,
emailFrom: '', emailFrom: '',
@@ -452,7 +422,6 @@ class Settings {
}, },
discord: { discord: {
enabled: false, enabled: false,
embedPoster: true,
types: 0, types: 0,
options: { options: {
webhookUrl: '', webhookUrl: '',
@@ -460,9 +429,15 @@ class Settings {
enableMentions: true, enableMentions: true,
}, },
}, },
lunasea: {
enabled: false,
types: 0,
options: {
webhookUrl: '',
},
},
slack: { slack: {
enabled: false, enabled: false,
embedPoster: true,
types: 0, types: 0,
options: { options: {
webhookUrl: '', webhookUrl: '',
@@ -470,7 +445,6 @@ class Settings {
}, },
telegram: { telegram: {
enabled: false, enabled: false,
embedPoster: true,
types: 0, types: 0,
options: { options: {
botAPI: '', botAPI: '',
@@ -481,7 +455,6 @@ class Settings {
}, },
pushbullet: { pushbullet: {
enabled: false, enabled: false,
embedPoster: false,
types: 0, types: 0,
options: { options: {
accessToken: '', accessToken: '',
@@ -489,7 +462,6 @@ class Settings {
}, },
pushover: { pushover: {
enabled: false, enabled: false,
embedPoster: true,
types: 0, types: 0,
options: { options: {
accessToken: '', accessToken: '',
@@ -499,7 +471,6 @@ class Settings {
}, },
webhook: { webhook: {
enabled: false, enabled: false,
embedPoster: true,
types: 0, types: 0,
options: { options: {
webhookUrl: '', webhookUrl: '',
@@ -509,12 +480,10 @@ class Settings {
}, },
webpush: { webpush: {
enabled: false, enabled: false,
embedPoster: true,
options: {}, options: {},
}, },
gotify: { gotify: {
enabled: false, enabled: false,
embedPoster: false,
types: 0, types: 0,
options: { options: {
url: '', url: '',
@@ -524,7 +493,6 @@ class Settings {
}, },
ntfy: { ntfy: {
enabled: false, enabled: false,
embedPoster: true,
types: 0, types: 0,
options: { options: {
url: '', url: '',
@@ -576,7 +544,6 @@ class Settings {
}, },
network: { network: {
csrfProtection: false, csrfProtection: false,
forceIpv4First: false,
trustProxy: false, trustProxy: false,
proxy: { proxy: {
enabled: false, enabled: false,
@@ -588,13 +555,7 @@ class Settings {
bypassFilter: '', bypassFilter: '',
bypassLocalAddresses: true, bypassLocalAddresses: true,
}, },
dnsCache: {
enabled: false,
forceMinTtl: 0,
forceMaxTtl: -1,
},
}, },
migrations: [],
}; };
if (initialSettings) { if (initialSettings) {
this.data = merge(this.data, initialSettings); this.data = merge(this.data, initialSettings);
@@ -633,14 +594,6 @@ class Settings {
this.data.tautulli = data; this.data.tautulli = data;
} }
get metadataSettings(): MetadataSettings {
return this.data.metadataSettings;
}
set metadataSettings(data: MetadataSettings) {
this.data.metadataSettings = data;
}
get radarr(): RadarrSettings[] { get radarr(): RadarrSettings[] {
return this.data.radarr; return this.data.radarr;
} }
@@ -724,14 +677,6 @@ class Settings {
this.data.network = data; this.data.network = data;
} }
get migrations(): string[] {
return this.data.migrations;
}
set migrations(data: string[]) {
this.data.migrations = data;
}
get clientId(): string { get clientId(): string {
return this.data.clientId; return this.data.clientId;
} }
View File
@@ -1,14 +0,0 @@
import type { AllSettings } from '@server/lib/settings';
const removeLunaSeaSetting = (settings: any): AllSettings => {
if (
settings.notifications &&
settings.notifications.agents &&
settings.notifications.agents.lunasea
) {
delete settings.notifications.agents.lunasea;
}
return settings;
};
export default removeLunaSeaSetting;
View File
@@ -1,93 +0,0 @@
import RadarrAPI from '@server/api/servarr/radarr';
import SonarrAPI from '@server/api/servarr/sonarr';
import { getRepository } from '@server/datasource';
import { User } from '@server/entity/User';
import type { AllSettings } from '@server/lib/settings';
const migrationArrTags = async (settings: any): Promise<AllSettings> => {
if (
Array.isArray(settings.migrations) &&
settings.migrations.includes('0007_migrate_arr_tags')
) {
return settings;
}
const userRepository = getRepository(User);
const users = await userRepository.find({
select: ['id'],
});
let errorOccurred = false;
for (const radarrSettings of settings.radarr || []) {
if (!radarrSettings.tagRequests) {
continue;
}
try {
const radarr = new RadarrAPI({
apiKey: radarrSettings.apiKey,
url: RadarrAPI.buildUrl(radarrSettings, '/api/v3'),
});
const radarrTags = await radarr.getTags();
for (const user of users) {
const userTag = radarrTags.find((v) =>
v.label.startsWith(user.id + ' - ')
);
if (!userTag) {
continue;
}
await radarr.renameTag({
id: userTag.id,
label: userTag.label.replace(`${user.id} - `, `${user.id}-`),
});
}
} catch (error) {
console.error(
`Unable to rename Radarr tags to the new format. Please check your Radarr connection settings for the instance "${radarrSettings.name}".`,
error.message
);
errorOccurred = true;
}
}
for (const sonarrSettings of settings.sonarr || []) {
if (!sonarrSettings.tagRequests) {
continue;
}
try {
const sonarr = new SonarrAPI({
apiKey: sonarrSettings.apiKey,
url: SonarrAPI.buildUrl(sonarrSettings, '/api/v3'),
});
const sonarrTags = await sonarr.getTags();
for (const user of users) {
const userTag = sonarrTags.find((v) =>
v.label.startsWith(user.id + ' - ')
);
if (!userTag) {
continue;
}
await sonarr.renameTag({
id: userTag.id,
label: userTag.label.replace(`${user.id} - `, `${user.id}-`),
});
}
} catch (error) {
console.error(
`Unable to rename Sonarr tags to the new format. Please check your Sonarr connection settings for the instance "${sonarrSettings.name}".`,
error.message
);
errorOccurred = true;
}
}
if (!errorOccurred) {
if (!Array.isArray(settings.migrations)) {
settings.migrations = [];
}
settings.migrations.push('0007_migrate_arr_tags');
}
return settings;
};
export default migrationArrTags;
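
The migration file shown above (removed in this comparison) renames existing Radarr/Sonarr tags from the old `<id> - <name>` format to `<id>-<name>`, and it follows a run-once pattern: skip when the key `0007_migrate_arr_tags` is already recorded in `settings.migrations`, and record the key only if every instance was processed without error. A minimal sketch of that pattern; `settings` and `migrate` are placeholder names, not the real Jellyseerr APIs:

// Run-once migration helper: skip if the key is already recorded, and only
// record it when the work completed without errors so it is retried next start.
async function runOnce(
  settings: { migrations?: string[] },
  key: string,
  migrate: () => Promise<void>
): Promise<void> {
  if (settings.migrations?.includes(key)) {
    return; // already applied
  }

  let errorOccurred = false;
  try {
    await migrate();
  } catch (error) {
    console.error(`Migration ${key} failed`, error);
    errorOccurred = true;
  }

  if (!errorOccurred) {
    settings.migrations = [...(settings.migrations ?? []), key];
  }
}
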
View File
@@ -124,7 +124,7 @@ const mapEpisodeResult = (episode: TmdbTvEpisodeResult): Episode => ({
seasonNumber: episode.season_number, seasonNumber: episode.season_number,
showId: episode.show_id, showId: episode.show_id,
voteAverage: episode.vote_average, voteAverage: episode.vote_average,
voteCount: episode.vote_count, voteCount: episode.vote_cuont,
stillPath: episode.still_path, stillPath: episode.still_path,
}); });
View File
@@ -61,7 +61,6 @@ const QueryFilterOptions = z.object({
studio: z.coerce.string().optional(), studio: z.coerce.string().optional(),
genre: z.coerce.string().optional(), genre: z.coerce.string().optional(),
keywords: z.coerce.string().optional(), keywords: z.coerce.string().optional(),
excludeKeywords: z.coerce.string().optional(),
language: z.coerce.string().optional(), language: z.coerce.string().optional(),
withRuntimeGte: z.coerce.string().optional(), withRuntimeGte: z.coerce.string().optional(),
withRuntimeLte: z.coerce.string().optional(), withRuntimeLte: z.coerce.string().optional(),
@@ -91,7 +90,6 @@ discoverRoutes.get('/movies', async (req, res, next) => {
try { try {
const query = ApiQuerySchema.parse(req.query); const query = ApiQuerySchema.parse(req.query);
const keywords = query.keywords; const keywords = query.keywords;
const excludeKeywords = query.excludeKeywords;
const data = await tmdb.getDiscoverMovies({ const data = await tmdb.getDiscoverMovies({
page: Number(query.page), page: Number(query.page),
@@ -107,7 +105,6 @@ discoverRoutes.get('/movies', async (req, res, next) => {
? new Date(query.primaryReleaseDateGte).toISOString().split('T')[0] ? new Date(query.primaryReleaseDateGte).toISOString().split('T')[0]
: undefined, : undefined,
keywords, keywords,
excludeKeywords,
withRuntimeGte: query.withRuntimeGte, withRuntimeGte: query.withRuntimeGte,
withRuntimeLte: query.withRuntimeLte, withRuntimeLte: query.withRuntimeLte,
voteAverageGte: query.voteAverageGte, voteAverageGte: query.voteAverageGte,
@@ -131,15 +128,11 @@ discoverRoutes.get('/movies', async (req, res, next) => {
if (keywords) { if (keywords) {
const splitKeywords = keywords.split(','); const splitKeywords = keywords.split(',');
const keywordResults = await Promise.all( keywordData = await Promise.all(
splitKeywords.map(async (keywordId) => { splitKeywords.map(async (keywordId) => {
return await tmdb.getKeywordDetails({ keywordId: Number(keywordId) }); return await tmdb.getKeywordDetails({ keywordId: Number(keywordId) });
}) })
); );
keywordData = keywordResults.filter(
(keyword): keyword is TmdbKeyword => keyword !== null
);
} }
return res.status(200).json({ return res.status(200).json({
@@ -384,7 +377,6 @@ discoverRoutes.get('/tv', async (req, res, next) => {
try { try {
const query = ApiQuerySchema.parse(req.query); const query = ApiQuerySchema.parse(req.query);
const keywords = query.keywords; const keywords = query.keywords;
const excludeKeywords = query.excludeKeywords;
const data = await tmdb.getDiscoverTv({ const data = await tmdb.getDiscoverTv({
page: Number(query.page), page: Number(query.page),
sortBy: query.sortBy as SortOptions, sortBy: query.sortBy as SortOptions,
@@ -399,7 +391,6 @@ discoverRoutes.get('/tv', async (req, res, next) => {
: undefined, : undefined,
originalLanguage: query.language, originalLanguage: query.language,
keywords, keywords,
excludeKeywords,
withRuntimeGte: query.withRuntimeGte, withRuntimeGte: query.withRuntimeGte,
withRuntimeLte: query.withRuntimeLte, withRuntimeLte: query.withRuntimeLte,
voteAverageGte: query.voteAverageGte, voteAverageGte: query.voteAverageGte,
@@ -424,15 +415,11 @@ discoverRoutes.get('/tv', async (req, res, next) => {
if (keywords) { if (keywords) {
const splitKeywords = keywords.split(','); const splitKeywords = keywords.split(',');
const keywordResults = await Promise.all( keywordData = await Promise.all(
splitKeywords.map(async (keywordId) => { splitKeywords.map(async (keywordId) => {
return await tmdb.getKeywordDetails({ keywordId: Number(keywordId) }); return await tmdb.getKeywordDetails({ keywordId: Number(keywordId) });
}) })
); );
keywordData = keywordResults.filter(
(keyword): keyword is TmdbKeyword => keyword !== null
);
} }
return res.status(200).json({ return res.status(200).json({
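
One side of the keyword handling above filters out failed keyword lookups before returning them, using a type-guard predicate so TypeScript narrows the array from `(TmdbKeyword | null)[]` to `TmdbKeyword[]`. A minimal standalone sketch of that narrowing, with a simplified keyword type:

// Simplified keyword shape for illustration.
interface TmdbKeyword {
  id: number;
  name: string;
}

// Look up several keyword IDs and drop the ones that could not be resolved.
// The type-guard predicate narrows (TmdbKeyword | null)[] down to TmdbKeyword[].
async function lookupKeywords(
  ids: number[],
  getKeyword: (id: number) => Promise<TmdbKeyword | null>
): Promise<TmdbKeyword[]> {
  const results = await Promise.all(ids.map((id) => getKeyword(id)));
  return results.filter((keyword): keyword is TmdbKeyword => keyword !== null);
}
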
View File
@@ -4,40 +4,27 @@ import { Router } from 'express';
const router = Router(); const router = Router();
// Delay the initialization of ImageProxy instances until the proxy (if any) is properly configured const tmdbImageProxy = new ImageProxy('tmdb', 'https://image.tmdb.org', {
let _tmdbImageProxy: ImageProxy; rateLimitOptions: {
function initTmdbImageProxy() { maxRequests: 20,
if (!_tmdbImageProxy) { maxRPS: 50,
_tmdbImageProxy = new ImageProxy('tmdb', 'https://image.tmdb.org', { },
rateLimitOptions: { });
maxRequests: 20, const tvdbImageProxy = new ImageProxy('tvdb', 'https://artworks.thetvdb.com', {
maxRPS: 50, rateLimitOptions: {
}, maxRequests: 20,
}); maxRPS: 50,
} },
return _tmdbImageProxy; });
}
let _tvdbImageProxy: ImageProxy;
function initTvdbImageProxy() {
if (!_tvdbImageProxy) {
_tvdbImageProxy = new ImageProxy('tvdb', 'https://artworks.thetvdb.com', {
rateLimitOptions: {
maxRequests: 20,
maxRPS: 50,
},
});
}
return _tvdbImageProxy;
}
router.get('/:type/*', async (req, res) => { router.get('/:type/*', async (req, res) => {
const imagePath = req.path.replace(/^\/\w+/, ''); const imagePath = req.path.replace(/^\/\w+/, '');
try { try {
let imageData; let imageData;
if (req.params.type === 'tmdb') { if (req.params.type === 'tmdb') {
imageData = await initTmdbImageProxy().getImage(imagePath); imageData = await tmdbImageProxy.getImage(imagePath);
} else if (req.params.type === 'tvdb') { } else if (req.params.type === 'tvdb') {
imageData = await initTvdbImageProxy().getImage(imagePath); imageData = await tvdbImageProxy.getImage(imagePath);
} else { } else {
logger.error('Unsupported image type', { logger.error('Unsupported image type', {
imagePath, imagePath,
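
One side of this hunk constructs the ImageProxy instances lazily, inside init functions, so they are only created once the proxy configuration has been applied; the other side constructs them eagerly at module load. A minimal sketch of that lazy-singleton pattern, with a placeholder class standing in for ImageProxy:

// Stand-in for a class whose constructor depends on configuration applied at startup.
class ProxyClient {
  constructor(public key: string, public baseUrl: string) {}
}

// Create the instance on first use instead of at module load time, so any
// proxy/agent configuration set during startup is already in effect.
let tmdbProxy: ProxyClient | undefined;

function getTmdbProxy(): ProxyClient {
  if (!tmdbProxy) {
    tmdbProxy = new ProxyClient('tmdb', 'https://image.tmdb.org');
  }
  return tmdbProxy;
}
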
View File
@@ -54,7 +54,6 @@ issueRoutes.get<Record<string, string>, IssueResultsResponse>(
.leftJoinAndSelect('issue.createdBy', 'createdBy') .leftJoinAndSelect('issue.createdBy', 'createdBy')
.leftJoinAndSelect('issue.media', 'media') .leftJoinAndSelect('issue.media', 'media')
.leftJoinAndSelect('issue.modifiedBy', 'modifiedBy') .leftJoinAndSelect('issue.modifiedBy', 'modifiedBy')
.leftJoinAndSelect('issue.comments', 'comments')
.where('issue.status IN (:...issueStatus)', { .where('issue.status IN (:...issueStatus)', {
issueStatus: statusFilter, issueStatus: statusFilter,
}); });
View File
@@ -197,10 +197,8 @@ mediaRoutes.delete(
const media = await mediaRepository.findOneOrFail({ const media = await mediaRepository.findOneOrFail({
where: { id: Number(req.params.id) }, where: { id: Number(req.params.id) },
}); });
const is4k = media.serviceUrl4k !== undefined;
const is4k = req.query.is4k === 'true';
const isMovie = media.mediaType === MediaType.MOVIE; const isMovie = media.mediaType === MediaType.MOVIE;
let serviceSettings; let serviceSettings;
if (isMovie) { if (isMovie) {
serviceSettings = settings.radarr.find( serviceSettings = settings.radarr.find(
@@ -227,7 +225,6 @@ mediaRoutes.delete(
); );
} }
} }
if (!serviceSettings) { if (!serviceSettings) {
logger.warn( logger.warn(
`There is no default ${ `There is no default ${
@@ -242,7 +239,6 @@ mediaRoutes.delete(
); );
return; return;
} }
let service; let service;
if (isMovie) { if (isMovie) {
service = new RadarrAPI({ service = new RadarrAPI({
View File
@@ -381,12 +381,6 @@ requestRoutes.get('/count', async (_req, res, next) => {
) )
.getCount(); .getCount();
const completedCount = await query
.where('request.status = :requestStatus', {
requestStatus: MediaRequestStatus.COMPLETED,
})
.getCount();
return res.status(200).json({ return res.status(200).json({
total: totalCount, total: totalCount,
movie: movieCount, movie: movieCount,
@@ -396,7 +390,6 @@ requestRoutes.get('/count', async (_req, res, next) => {
declined: declinedCount, declined: declinedCount,
processing: processingCount, processing: processingCount,
available: availableCount, available: availableCount,
completed: completedCount,
}); });
} catch (e) { } catch (e) {
logger.error('Something went wrong retrieving request counts', { logger.error('Something went wrong retrieving request counts', {
View File
@@ -28,9 +28,7 @@ import discoverSettingRoutes from '@server/routes/settings/discover';
import { ApiError } from '@server/types/error'; import { ApiError } from '@server/types/error';
import { appDataPath } from '@server/utils/appDataVolume'; import { appDataPath } from '@server/utils/appDataVolume';
import { getAppVersion } from '@server/utils/appVersion'; import { getAppVersion } from '@server/utils/appVersion';
import { dnsCache } from '@server/utils/dnsCache';
import { getHostname } from '@server/utils/getHostname'; import { getHostname } from '@server/utils/getHostname';
import type { DnsEntries, DnsStats } from 'dns-caching';
import { Router } from 'express'; import { Router } from 'express';
import rateLimit from 'express-rate-limit'; import rateLimit from 'express-rate-limit';
import fs from 'fs'; import fs from 'fs';
@@ -39,7 +37,6 @@ import { rescheduleJob } from 'node-schedule';
import path from 'path'; import path from 'path';
import semver from 'semver'; import semver from 'semver';
import { URL } from 'url'; import { URL } from 'url';
import metadataRoutes from './metadata';
import notificationRoutes from './notifications'; import notificationRoutes from './notifications';
import radarrRoutes from './radarr'; import radarrRoutes from './radarr';
import sonarrRoutes from './sonarr'; import sonarrRoutes from './sonarr';
@@ -50,7 +47,6 @@ settingsRoutes.use('/notifications', notificationRoutes);
settingsRoutes.use('/radarr', radarrRoutes); settingsRoutes.use('/radarr', radarrRoutes);
settingsRoutes.use('/sonarr', sonarrRoutes); settingsRoutes.use('/sonarr', sonarrRoutes);
settingsRoutes.use('/discover', discoverSettingRoutes); settingsRoutes.use('/discover', discoverSettingRoutes);
settingsRoutes.use('/metadatas', metadataRoutes);
const filteredMainSettings = ( const filteredMainSettings = (
user: User, user: User,
@@ -759,19 +755,12 @@ settingsRoutes.get('/cache', async (_req, res) => {
const tmdbImageCache = await ImageProxy.getImageStats('tmdb'); const tmdbImageCache = await ImageProxy.getImageStats('tmdb');
const avatarImageCache = await ImageProxy.getImageStats('avatar'); const avatarImageCache = await ImageProxy.getImageStats('avatar');
const stats: DnsStats | undefined = dnsCache?.getStats();
const entries: DnsEntries | undefined = dnsCache?.getCacheEntries();
return res.status(200).json({ return res.status(200).json({
apiCaches, apiCaches,
imageCache: { imageCache: {
tmdb: tmdbImageCache, tmdb: tmdbImageCache,
avatar: avatarImageCache, avatar: avatarImageCache,
}, },
dnsCache: {
stats,
entries,
},
}); });
}); });
@@ -789,20 +778,6 @@ settingsRoutes.post<{ cacheId: AvailableCacheIds }>(
} }
); );
settingsRoutes.post<{ dnsEntry: string }>(
'/cache/dns/:dnsEntry/flush',
(req, res, next) => {
const dnsEntry = req.params.dnsEntry;
if (dnsCache) {
dnsCache.clear(dnsEntry);
return res.status(204).send();
}
next({ status: 404, message: 'Cache not found.' });
}
);
settingsRoutes.post( settingsRoutes.post(
'/initialize', '/initialize',
isAuthenticated(Permission.ADMIN), isAuthenticated(Permission.ADMIN),
View File
@@ -1,153 +0,0 @@
import TheMovieDb from '@server/api/themoviedb';
import Tvdb from '@server/api/tvdb';
import {
getSettings,
MetadataProviderType,
type MetadataSettings,
} from '@server/lib/settings';
import logger from '@server/logger';
import { Router } from 'express';
function getTestResultString(testValue: number): string {
if (testValue === -1) return 'not tested';
if (testValue === 0) return 'failed';
return 'ok';
}
const metadataRoutes = Router();
metadataRoutes.get('/', (_req, res) => {
const settings = getSettings();
res.status(200).json({
tv: settings.metadataSettings.tv,
anime: settings.metadataSettings.anime,
});
});
metadataRoutes.put('/', async (req, res) => {
const settings = getSettings();
const body = req.body as MetadataSettings;
let tvdbTest = -1;
let tmdbTest = -1;
try {
if (
body.tv === MetadataProviderType.TVDB ||
body.anime === MetadataProviderType.TVDB
) {
tvdbTest = 0;
const tvdb = await Tvdb.getInstance();
await tvdb.test();
tvdbTest = 1;
}
} catch (e) {
logger.error('Failed to test metadata provider', {
label: 'Metadata',
message: e.message,
});
}
try {
if (
body.tv === MetadataProviderType.TMDB ||
body.anime === MetadataProviderType.TMDB
) {
tmdbTest = 0;
const tmdb = new TheMovieDb();
await tmdb.getTvShow({ tvId: 1054 });
tmdbTest = 1;
}
} catch (e) {
logger.error('Failed to test metadata provider', {
label: 'MetadataProvider',
message: e.message,
});
}
// If a test failed, return the test results
if (tvdbTest === 0 || tmdbTest === 0) {
return res.status(500).json({
success: false,
tests: {
tvdb: getTestResultString(tvdbTest),
tmdb: getTestResultString(tmdbTest),
},
});
}
settings.metadataSettings = {
tv: body.tv,
anime: body.anime,
};
await settings.save();
res.status(200).json({
success: true,
tv: body.tv,
anime: body.anime,
tests: {
tvdb: getTestResultString(tvdbTest),
tmdb: getTestResultString(tmdbTest),
},
});
});
metadataRoutes.post('/test', async (req, res) => {
let tvdbTest = -1;
let tmdbTest = -1;
try {
const body = req.body as { tmdb: boolean; tvdb: boolean };
try {
if (body.tmdb) {
tmdbTest = 0;
const tmdb = new TheMovieDb();
await tmdb.getTvShow({ tvId: 1054 });
tmdbTest = 1;
}
} catch (e) {
logger.error('Failed to test metadata provider', {
label: 'MetadataProvider',
message: e.message,
});
}
try {
if (body.tvdb) {
tvdbTest = 0;
const tvdb = await Tvdb.getInstance();
await tvdb.test();
tvdbTest = 1;
}
} catch (e) {
logger.error('Failed to test metadata provider', {
label: 'MetadataProvider',
message: e.message,
});
}
const success = !(tvdbTest === 0 || tmdbTest === 0);
const statusCode = success ? 200 : 500;
return res.status(statusCode).json({
success: success,
tests: {
tmdb: getTestResultString(tmdbTest),
tvdb: getTestResultString(tvdbTest),
},
});
} catch (e) {
return res.status(500).json({
success: false,
tests: {
tmdb: getTestResultString(tmdbTest),
tvdb: getTestResultString(tvdbTest),
},
error: e.message,
});
}
});
export default metadataRoutes;
View File
@@ -4,6 +4,7 @@ import type { NotificationAgent } from '@server/lib/notifications/agents/agent';
import DiscordAgent from '@server/lib/notifications/agents/discord'; import DiscordAgent from '@server/lib/notifications/agents/discord';
import EmailAgent from '@server/lib/notifications/agents/email'; import EmailAgent from '@server/lib/notifications/agents/email';
import GotifyAgent from '@server/lib/notifications/agents/gotify'; import GotifyAgent from '@server/lib/notifications/agents/gotify';
import LunaSeaAgent from '@server/lib/notifications/agents/lunasea';
import NtfyAgent from '@server/lib/notifications/agents/ntfy'; import NtfyAgent from '@server/lib/notifications/agents/ntfy';
import PushbulletAgent from '@server/lib/notifications/agents/pushbullet'; import PushbulletAgent from '@server/lib/notifications/agents/pushbullet';
import PushoverAgent from '@server/lib/notifications/agents/pushover'; import PushoverAgent from '@server/lib/notifications/agents/pushover';
@@ -270,7 +271,6 @@ notificationRoutes.get('/webhook', (_req, res) => {
const response: typeof webhookSettings = { const response: typeof webhookSettings = {
enabled: webhookSettings.enabled, enabled: webhookSettings.enabled,
embedPoster: webhookSettings.embedPoster,
types: webhookSettings.types, types: webhookSettings.types,
options: { options: {
...webhookSettings.options, ...webhookSettings.options,
@@ -279,7 +279,6 @@ notificationRoutes.get('/webhook', (_req, res) => {
'utf8' 'utf8'
) )
), ),
supportVariables: webhookSettings.options.supportVariables ?? false,
}, },
}; };
@@ -293,7 +292,6 @@ notificationRoutes.post('/webhook', async (req, res, next) => {
settings.notifications.agents.webhook = { settings.notifications.agents.webhook = {
enabled: req.body.enabled, enabled: req.body.enabled,
embedPoster: req.body.embedPoster,
types: req.body.types, types: req.body.types,
options: { options: {
jsonPayload: Buffer.from(req.body.options.jsonPayload).toString( jsonPayload: Buffer.from(req.body.options.jsonPayload).toString(
@@ -301,7 +299,6 @@ notificationRoutes.post('/webhook', async (req, res, next) => {
), ),
webhookUrl: req.body.options.webhookUrl, webhookUrl: req.body.options.webhookUrl,
authHeader: req.body.options.authHeader, authHeader: req.body.options.authHeader,
supportVariables: req.body.options.supportVariables ?? false,
}, },
}; };
await settings.save(); await settings.save();
@@ -325,7 +322,6 @@ notificationRoutes.post('/webhook/test', async (req, res, next) => {
const testBody = { const testBody = {
enabled: req.body.enabled, enabled: req.body.enabled,
embedPoster: req.body.embedPoster,
types: req.body.types, types: req.body.types,
options: { options: {
jsonPayload: Buffer.from(req.body.options.jsonPayload).toString( jsonPayload: Buffer.from(req.body.options.jsonPayload).toString(
@@ -333,7 +329,6 @@ notificationRoutes.post('/webhook/test', async (req, res, next) => {
), ),
webhookUrl: req.body.options.webhookUrl, webhookUrl: req.body.options.webhookUrl,
authHeader: req.body.options.authHeader, authHeader: req.body.options.authHeader,
supportVariables: req.body.options.supportVariables ?? false,
}, },
}; };
@@ -351,6 +346,40 @@ notificationRoutes.post('/webhook/test', async (req, res, next) => {
} }
}); });
notificationRoutes.get('/lunasea', (_req, res) => {
const settings = getSettings();
res.status(200).json(settings.notifications.agents.lunasea);
});
notificationRoutes.post('/lunasea', async (req, res) => {
const settings = getSettings();
settings.notifications.agents.lunasea = req.body;
await settings.save();
res.status(200).json(settings.notifications.agents.lunasea);
});
notificationRoutes.post('/lunasea/test', async (req, res, next) => {
if (!req.user) {
return next({
status: 500,
message: 'User information is missing from the request.',
});
}
const lunaseaAgent = new LunaSeaAgent(req.body);
if (await sendTestNotification(lunaseaAgent, req.user)) {
return res.status(204).send();
} else {
return next({
status: 500,
message: 'Failed to send web push notification.',
});
}
});
notificationRoutes.get('/gotify', (_req, res) => { notificationRoutes.get('/gotify', (_req, res) => {
const settings = getSettings(); const settings = getSettings();
View File
@@ -1,8 +1,5 @@
import { getMetadataProvider } from '@server/api/metadata';
import RottenTomatoes from '@server/api/rating/rottentomatoes'; import RottenTomatoes from '@server/api/rating/rottentomatoes';
import TheMovieDb from '@server/api/themoviedb'; import TheMovieDb from '@server/api/themoviedb';
import { ANIME_KEYWORD_ID } from '@server/api/themoviedb/constants';
import type { TmdbKeyword } from '@server/api/themoviedb/interfaces';
import { MediaType } from '@server/constants/media'; import { MediaType } from '@server/constants/media';
import { getRepository } from '@server/datasource'; import { getRepository } from '@server/datasource';
import Media from '@server/entity/Media'; import Media from '@server/entity/Media';
@@ -16,20 +13,12 @@ const tvRoutes = Router();
tvRoutes.get('/:id', async (req, res, next) => { tvRoutes.get('/:id', async (req, res, next) => {
const tmdb = new TheMovieDb(); const tmdb = new TheMovieDb();
try { try {
const tmdbTv = await tmdb.getTvShow({ const tv = await tmdb.getTvShow({
tvId: Number(req.params.id),
});
const metadataProvider = tmdbTv.keywords.results.some(
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
)
? await getMetadataProvider('anime')
: await getMetadataProvider('tv');
const tv = await metadataProvider.getTvShow({
tvId: Number(req.params.id), tvId: Number(req.params.id),
language: (req.query.language as string) ?? req.locale, language: (req.query.language as string) ?? req.locale,
}); });
const media = await Media.getMedia(tv.id, MediaType.TV); const media = await Media.getMedia(tv.id, MediaType.TV);
const onUserWatchlist = await getRepository(Watchlist).exist({ const onUserWatchlist = await getRepository(Watchlist).exist({
@@ -45,9 +34,7 @@ tvRoutes.get('/:id', async (req, res, next) => {
// TMDB issue where it doesnt fallback to English when no overview is available in requested locale. // TMDB issue where it doesnt fallback to English when no overview is available in requested locale.
if (!data.overview) { if (!data.overview) {
const tvEnglish = await metadataProvider.getTvShow({ const tvEnglish = await tmdb.getTvShow({ tvId: Number(req.params.id) });
tvId: Number(req.params.id),
});
data.overview = tvEnglish.overview; data.overview = tvEnglish.overview;
} }
@@ -66,18 +53,10 @@ tvRoutes.get('/:id', async (req, res, next) => {
}); });
tvRoutes.get('/:id/season/:seasonNumber', async (req, res, next) => { tvRoutes.get('/:id/season/:seasonNumber', async (req, res, next) => {
try { const tmdb = new TheMovieDb();
const tmdb = new TheMovieDb();
const tmdbTv = await tmdb.getTvShow({
tvId: Number(req.params.id),
});
const metadataProvider = tmdbTv.keywords.results.some(
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
)
? await getMetadataProvider('anime')
: await getMetadataProvider('tv');
const season = await metadataProvider.getTvSeason({ try {
const season = await tmdb.getTvSeason({
tvId: Number(req.params.id), tvId: Number(req.params.id),
seasonNumber: Number(req.params.seasonNumber), seasonNumber: Number(req.params.seasonNumber),
language: (req.query.language as string) ?? req.locale, language: (req.query.language as string) ?? req.locale,
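
One side of the TV route above selects a metadata provider per title: it fetches the show from TMDB first and switches to the 'anime' provider when the keyword list contains the anime keyword. A minimal sketch of that selection; the keyword id value and provider names below are illustrative placeholders, not the project's real constants:

// Assumed TMDB keyword id for "anime"; the real constant lives in the codebase.
const ANIME_KEYWORD_ID = 210024;

interface KeywordedShow {
  keywords: { results: { id: number }[] };
}

type ProviderKind = 'tv' | 'anime';

// Decide which metadata provider should serve a show from its TMDB keywords.
function pickProvider(show: KeywordedShow): ProviderKind {
  const isAnime = show.keywords.results.some(
    (keyword) => keyword.id === ANIME_KEYWORD_ID
  );
  return isAnime ? 'anime' : 'tv';
}
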
View File
@@ -42,6 +42,9 @@ router.get('/', async (req, res, next) => {
: Math.max(10, includeIds.length); : Math.max(10, includeIds.length);
const skip = req.query.skip ? Number(req.query.skip) : 0; const skip = req.query.skip ? Number(req.query.skip) : 0;
const q = req.query.q ? req.query.q.toString().toLowerCase() : ''; const q = req.query.q ? req.query.q.toString().toLowerCase() : '';
const sortDirection =
(req.query.sortDirection as string) === 'asc' ? 'ASC' : 'DESC';
let query = getRepository(User).createQueryBuilder('user'); let query = getRepository(User).createQueryBuilder('user');
if (q) { if (q) {
@@ -56,28 +59,31 @@ router.get('/', async (req, res, next) => {
} }
switch (req.query.sort) { switch (req.query.sort) {
case 'created':
query = query.orderBy('user.createdAt', sortDirection);
break;
case 'updated': case 'updated':
query = query.orderBy('user.updatedAt', 'DESC'); query = query.orderBy('user.updatedAt', sortDirection);
break; break;
case 'displayname': case 'displayname':
query = query query = query
.addSelect( .addSelect(
`CASE WHEN (user.username IS NULL OR user.username = '') THEN ( `CASE WHEN (user.username IS NULL OR user.username = '') THEN (
CASE WHEN (user.plexUsername IS NULL OR user.plexUsername = '') THEN ( CASE WHEN (user.plexUsername IS NULL OR user.plexUsername = '') THEN (
CASE WHEN (user.jellyfinUsername IS NULL OR user.jellyfinUsername = '') THEN CASE WHEN (user.jellyfinUsername IS NULL OR user.jellyfinUsername = '') THEN
"user"."email" "user"."email"
ELSE
LOWER(user.jellyfinUsername)
END)
ELSE ELSE
LOWER(user.jellyfinUsername) LOWER(user.plexUsername)
END) END)
ELSE ELSE
LOWER(user.jellyfinUsername) LOWER(user.username)
END) END`,
ELSE
LOWER(user.username)
END`,
'displayname_sort_key' 'displayname_sort_key'
) )
.orderBy('displayname_sort_key', 'ASC'); .orderBy('displayname_sort_key', sortDirection);
break; break;
case 'requests': case 'requests':
query = query query = query
@@ -87,10 +93,16 @@ router.get('/', async (req, res, next) => {
.from(MediaRequest, 'request') .from(MediaRequest, 'request')
.where('request.requestedBy.id = user.id'); .where('request.requestedBy.id = user.id');
}, 'request_count') }, 'request_count')
.orderBy('request_count', 'DESC'); .orderBy('request_count', sortDirection);
break;
case 'usertype':
query = query.orderBy('user.userType', sortDirection);
break;
case 'role':
query = query.orderBy('user.permissions', sortDirection);
break; break;
default: default:
query = query.orderBy('user.id', 'ASC'); query = query.orderBy('user.id', sortDirection);
break; break;
} }
View File
@@ -33,93 +33,52 @@ import { EventSubscriber } from 'typeorm';
export class MediaRequestSubscriber export class MediaRequestSubscriber
implements EntitySubscriberInterface<MediaRequest> implements EntitySubscriberInterface<MediaRequest>
{ {
private async notifyAvailableMovie( private async notifyAvailableMovie(entity: MediaRequest) {
entity: MediaRequest,
event?: UpdateEvent<MediaRequest>
) {
// Get fresh media state using event manager
let latestMedia: Media | null = null;
if (event?.manager) {
latestMedia = await event.manager.findOne(Media, {
where: { id: entity.media.id },
});
}
if (!latestMedia) {
const mediaRepository = getRepository(Media);
latestMedia = await mediaRepository.findOne({
where: { id: entity.media.id },
});
}
// Check availability using fresh media state
if ( if (
!latestMedia || entity.media[entity.is4k ? 'status4k' : 'status'] ===
latestMedia[entity.is4k ? 'status4k' : 'status'] !== MediaStatus.AVAILABLE MediaStatus.AVAILABLE
) { ) {
return; const tmdb = new TheMovieDb();
}
const tmdb = new TheMovieDb(); try {
const movie = await tmdb.getMovie({
movieId: entity.media.tmdbId,
});
try { notificationManager.sendNotification(Notification.MEDIA_AVAILABLE, {
const movie = await tmdb.getMovie({ event: `${entity.is4k ? '4K ' : ''}Movie Request Now Available`,
movieId: entity.media.tmdbId, notifyAdmin: false,
}); notifySystem: true,
notifyUser: entity.requestedBy,
notificationManager.sendNotification(Notification.MEDIA_AVAILABLE, { subject: `${movie.title}${
event: `${entity.is4k ? '4K ' : ''}Movie Request Now Available`, movie.release_date ? ` (${movie.release_date.slice(0, 4)})` : ''
notifyAdmin: false, }`,
notifySystem: true, message: truncate(movie.overview, {
notifyUser: entity.requestedBy, length: 500,
subject: `${movie.title}${ separator: /\s/,
movie.release_date ? ` (${movie.release_date.slice(0, 4)})` : '' omission: '…',
}`, }),
message: truncate(movie.overview, { media: entity.media,
length: 500, image: `https://image.tmdb.org/t/p/w600_and_h900_bestv2${movie.poster_path}`,
separator: /\s/, request: entity,
omission: '…', });
}), } catch (e) {
media: latestMedia, logger.error('Something went wrong sending media notification(s)', {
image: `https://image.tmdb.org/t/p/w600_and_h900_bestv2${movie.poster_path}`, label: 'Notifications',
request: entity, errorMessage: e.message,
}); mediaId: entity.id,
} catch (e) { });
logger.error('Something went wrong sending media notification(s)', { }
label: 'Notifications',
errorMessage: e.message,
mediaId: entity.id,
});
} }
} }
private async notifyAvailableSeries( private async notifyAvailableSeries(entity: MediaRequest) {
entity: MediaRequest, // Find all seasons in the related media entity
event?: UpdateEvent<MediaRequest> // and see if they are available, then we can check
) { // if the request contains the same seasons
// Get fresh media state with seasons using event manager
let latestMedia: Media | null = null;
if (event?.manager) {
latestMedia = await event.manager.findOne(Media, {
where: { id: entity.media.id },
relations: { seasons: true },
});
}
if (!latestMedia) {
const mediaRepository = getRepository(Media);
latestMedia = await mediaRepository.findOne({
where: { id: entity.media.id },
relations: { seasons: true },
});
}
if (!latestMedia) {
return;
}
// Check availability using fresh media state
const requestedSeasons = const requestedSeasons =
entity.seasons?.map((entitySeason) => entitySeason.seasonNumber) ?? []; entity.seasons?.map((entitySeason) => entitySeason.seasonNumber) ?? [];
const availableSeasons = latestMedia.seasons.filter( const availableSeasons = entity.media.seasons.filter(
(season) => (season) =>
season[entity.is4k ? 'status4k' : 'status'] === MediaStatus.AVAILABLE && season[entity.is4k ? 'status4k' : 'status'] === MediaStatus.AVAILABLE &&
requestedSeasons.includes(season.seasonNumber) requestedSeasons.includes(season.seasonNumber)
@@ -128,46 +87,44 @@ export class MediaRequestSubscriber
availableSeasons.length > 0 && availableSeasons.length > 0 &&
availableSeasons.length === requestedSeasons.length; availableSeasons.length === requestedSeasons.length;
if (!isMediaAvailable) { if (isMediaAvailable) {
return; const tmdb = new TheMovieDb();
}
const tmdb = new TheMovieDb(); try {
const tv = await tmdb.getTvShow({ tvId: entity.media.tmdbId });
try { notificationManager.sendNotification(Notification.MEDIA_AVAILABLE, {
const tv = await tmdb.getTvShow({ tvId: entity.media.tmdbId }); event: `${entity.is4k ? '4K ' : ''}Series Request Now Available`,
subject: `${tv.name}${
notificationManager.sendNotification(Notification.MEDIA_AVAILABLE, { tv.first_air_date ? ` (${tv.first_air_date.slice(0, 4)})` : ''
event: `${entity.is4k ? '4K ' : ''}Series Request Now Available`, }`,
subject: `${tv.name}${ message: truncate(tv.overview, {
tv.first_air_date ? ` (${tv.first_air_date.slice(0, 4)})` : '' length: 500,
}`, separator: /\s/,
message: truncate(tv.overview, { omission: '…',
length: 500, }),
separator: /\s/, notifyAdmin: false,
omission: '…', notifySystem: true,
}), notifyUser: entity.requestedBy,
notifyAdmin: false, image: `https://image.tmdb.org/t/p/w600_and_h900_bestv2${tv.poster_path}`,
notifySystem: true, media: entity.media,
notifyUser: entity.requestedBy, extra: [
image: `https://image.tmdb.org/t/p/w600_and_h900_bestv2${tv.poster_path}`, {
media: latestMedia, name: 'Requested Seasons',
extra: [ value: entity.seasons
{ .map((season) => season.seasonNumber)
name: 'Requested Seasons', .join(', '),
value: entity.seasons },
.map((season) => season.seasonNumber) ],
.join(', '), request: entity,
}, });
], } catch (e) {
request: entity, logger.error('Something went wrong sending media notification(s)', {
}); label: 'Notifications',
} catch (e) { errorMessage: e.message,
logger.error('Something went wrong sending media notification(s)', { mediaId: entity.id,
label: 'Notifications', });
errorMessage: e.message, }
mediaId: entity.id,
});
} }
} }
@@ -292,17 +249,9 @@ export class MediaRequestSubscriber
} }
if (radarrSettings.tagRequests) { if (radarrSettings.tagRequests) {
const radarrTags = await radarr.getTags(); let userTag = (await radarr.getTags()).find((v) =>
// old tags had space around the hyphen
let userTag = radarrTags.find((v) =>
v.label.startsWith(entity.requestedBy.id + ' - ') v.label.startsWith(entity.requestedBy.id + ' - ')
); );
// new tags do not have spaces around the hyphen, since spaces are not allowed anymore
if (!userTag) {
userTag = radarrTags.find((v) =>
v.label.startsWith(entity.requestedBy.id + '-')
);
}
if (!userTag) { if (!userTag) {
logger.info(`Requester has no active tag. Creating new`, { logger.info(`Requester has no active tag. Creating new`, {
label: 'Media Request', label: 'Media Request',
@@ -310,11 +259,11 @@ export class MediaRequestSubscriber
mediaId: entity.media.id, mediaId: entity.media.id,
userId: entity.requestedBy.id, userId: entity.requestedBy.id,
newTag: newTag:
entity.requestedBy.id + '-' + entity.requestedBy.displayName, entity.requestedBy.id + ' - ' + entity.requestedBy.displayName,
}); });
userTag = await radarr.createTag({ userTag = await radarr.createTag({
label: label:
entity.requestedBy.id + '-' + entity.requestedBy.displayName, entity.requestedBy.id + ' - ' + entity.requestedBy.displayName,
}); });
} }
if (userTag.id) { if (userTag.id) {
@@ -609,17 +558,9 @@ export class MediaRequestSubscriber
} }
if (sonarrSettings.tagRequests) { if (sonarrSettings.tagRequests) {
const sonarrTags = await sonarr.getTags(); let userTag = (await sonarr.getTags()).find((v) =>
// old tags had space around the hyphen
let userTag = sonarrTags.find((v) =>
v.label.startsWith(entity.requestedBy.id + ' - ') v.label.startsWith(entity.requestedBy.id + ' - ')
); );
// new tags do not have spaces around the hyphen, since spaces are not allowed anymore
if (!userTag) {
userTag = sonarrTags.find((v) =>
v.label.startsWith(entity.requestedBy.id + '-')
);
}
if (!userTag) { if (!userTag) {
logger.info(`Requester has no active tag. Creating new`, { logger.info(`Requester has no active tag. Creating new`, {
label: 'Media Request', label: 'Media Request',
@@ -627,11 +568,11 @@ export class MediaRequestSubscriber
mediaId: entity.media.id, mediaId: entity.media.id,
userId: entity.requestedBy.id, userId: entity.requestedBy.id,
newTag: newTag:
entity.requestedBy.id + '-' + entity.requestedBy.displayName, entity.requestedBy.id + ' - ' + entity.requestedBy.displayName,
}); });
userTag = await sonarr.createTag({ userTag = await sonarr.createTag({
label: label:
entity.requestedBy.id + '-' + entity.requestedBy.displayName, entity.requestedBy.id + ' - ' + entity.requestedBy.displayName,
}); });
} }
if (userTag.id) { if (userTag.id) {
@@ -841,10 +782,10 @@ export class MediaRequestSubscriber
if (event.entity.status === MediaRequestStatus.COMPLETED) { if (event.entity.status === MediaRequestStatus.COMPLETED) {
if (event.entity.media.mediaType === MediaType.MOVIE) { if (event.entity.media.mediaType === MediaType.MOVIE) {
this.notifyAvailableMovie(event.entity as MediaRequest, event); this.notifyAvailableMovie(event.entity as MediaRequest);
} }
if (event.entity.media.mediaType === MediaType.TV) { if (event.entity.media.mediaType === MediaType.TV) {
this.notifyAvailableSeries(event.entity as MediaRequest, event); this.notifyAvailableSeries(event.entity as MediaRequest);
} }
} }
} }

Some files were not shown because too many files have changed in this diff.