Compare commits

22 commits: preview-se ... develop

| SHA1 |
|---|
| 83843bb6c8 |
| c2fe0fdc95 |
| 880fbc902d |
| fba20c1b39 |
| fa905be002 |
| 9da8bb6dea |
| 0e636a3f99 |
| e0e4b6f512 |
| dc1734d41f |
| 06e5eb0704 |
| 88afcc113d |
| 4939f13dbe |
| 5e57fdcf66 |
| cf4883a55e |
| 5e64d49c32 |
| c6bcfe0ae4 |
| 6076878f76 |
| 8f0c904928 |
| 04b9d87174 |
| b499976902 |
| 87fb0dfd6c |
| b6a913211a |
3  .github/ISSUE_TEMPLATE/bug.yml (vendored)

@@ -1,6 +1,7 @@
 name: 🐛 Bug Report
 description: Report a problem
-labels: ['bug', 'awaiting triage']
+labels: ['awaiting triage']
+type: bug
 body:
   - type: markdown
     attributes:
63  .github/ISSUE_TEMPLATE/documentation.yml (vendored, new file)

@@ -0,0 +1,63 @@
name: 📚 Documentation
description: Report a docs problem or suggest a docs improvement
title: "[Docs]: "
labels: ["documentation", "awaiting triage"]
type: task
body:
  - type: markdown
    attributes:
      value: |
        Thanks for helping improve the docs!

        Use this template for documentation issues (typos, unclear steps, missing info, outdated screenshots).
        For app bugs or feature ideas, please use the other templates.
  - type: input
    id: doc-location
    attributes:
      label: Page / Location
      description: Link to the docs page or the file/path (e.g. https://docs.seerr.dev/... or README.md)
      placeholder: "https://docs.seerr.dev/..."
    validations:
      required: true
  - type: dropdown
    id: doc-area
    attributes:
      label: Docs Area
      options:
        - docs site
        - migration guide
        - README / repo docs
        - API / integrations
        - other
    validations:
      required: true
  - type: textarea
    id: problem
    attributes:
      label: What’s wrong / missing?
      description: Describe the issue in the docs.
    validations:
      required: true
  - type: textarea
    id: suggested-fix
    attributes:
      label: Suggested change
      description: If you know what should be changed, describe it (or paste proposed wording).
    validations:
      required: false
  - type: checkboxes
    id: search-existing
    attributes:
      label: Search Existing Issues
      description: Have you searched existing issues to see if this has already been reported?
      options:
        - label: Yes, I have searched existing issues.
          required: true
  - type: checkboxes
    id: terms
    attributes:
      label: Code of Conduct
      description: By submitting this issue, you agree to follow our Code of Conduct.
      options:
        - label: I agree to follow Seerr's [Code of Conduct](https://github.com/seerr-team/seerr/blob/develop/CODE_OF_CONDUCT.md).
          required: true
3  .github/ISSUE_TEMPLATE/enhancement.yml (vendored)

@@ -1,6 +1,7 @@
 name: ✨ Feature Request
 description: Suggest an idea
-labels: ['enhancement', 'awaiting triage']
+labels: ['awaiting triage']
+type: feature
 body:
   - type: markdown
     attributes:
65  .github/ISSUE_TEMPLATE/maintenance.yml (vendored, new file)

@@ -0,0 +1,65 @@
name: 🧰 Maintenance / Chore
description: CI, GitHub Actions, build, dependencies, refactors (non-feature work)
title: "[Chore]: "
labels: ["maintenance", "awaiting triage"]
type: task
body:
  - type: markdown
    attributes:
      value: |
        Maintainers / contributors: use this for internal tasks (CI, workflows, tooling, refactors).
        If you're reporting a user-facing bug or requesting a feature, use the other templates.
  - type: dropdown
    id: area
    attributes:
      label: Area
      options:
        - CI / GitHub Actions
        - build / packaging
        - dependencies
        - release process
        - refactor / tech debt
        - tooling / scripts
        - other
    validations:
      required: true
  - type: textarea
    id: summary
    attributes:
      label: Summary
      description: What needs doing and why?
    validations:
      required: true
  - type: textarea
    id: acceptance
    attributes:
      label: Acceptance criteria
      description: What does "done" look like?
      placeholder: |
        - [ ] ...
        - [ ] ...
    validations:
      required: false
  - type: input
    id: related
    attributes:
      label: Related links
      description: PRs, failing workflow runs, logs, or relevant issues.
    validations:
      required: false
  - type: checkboxes
    id: search-existing
    attributes:
      label: Search Existing Issues
      description: Have you searched existing issues to see if this has already been reported?
      options:
        - label: Yes, I have searched existing issues.
          required: true
  - type: checkboxes
    id: terms
    attributes:
      label: Code of Conduct
      description: By submitting this issue, you agree to follow our Code of Conduct.
      options:
        - label: I agree to follow Seerr's [Code of Conduct](https://github.com/seerr-team/seerr/blob/develop/CODE_OF_CONDUCT.md).
          required: true
2  .github/renovate/helm.json5 (vendored)

@@ -16,7 +16,7 @@
       description: 'Update appVersion in Chart.yaml to match Docker image',
       fileMatch: ['(^|/)Chart\\.yaml$'],
       matchStrings: [
-        '#\\s+renovate:\\s+image=(?<depName>\\S*)\nappVersion:\\s+"(?<currentValue>\\S*)"',
+        "#\\s+renovate:\\s+image=(?<depName>\\S*)\nappVersion:\\s+'(?<currentValue>\\S*)'",
       ],
       datasourceTemplate: 'docker',
     },
6  .github/workflows/ci.yml (vendored)

@@ -18,7 +18,7 @@ env:
   DOCKER_HUB: seerr/seerr

 concurrency:
-  group: ci-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

 jobs:
@@ -129,7 +129,7 @@ jobs:

   build:
     name: Build (per-arch, native runners)
-    if: github.ref == 'refs/heads/develop' && !contains(github.event.head_commit.message, '[skip ci]')
+    if: github.ref == 'refs/heads/develop'
     strategy:
       matrix:
         include:
@@ -237,7 +237,7 @@ jobs:
   discord:
     name: Send Discord Notification
     needs: publish
-    if: always() && github.event_name != 'pull_request' && !contains(github.event.head_commit.message, '[skip ci]')
+    if: always() && github.event_name != 'pull_request'
     runs-on: ubuntu-24.04
     steps:
       - name: Determine Workflow Status
2  .github/workflows/codeql.yml (vendored)

@@ -20,7 +20,7 @@ permissions:
   contents: read

 concurrency:
-  group: codeql-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

 jobs:
2  .github/workflows/conflict_labeler.yml (vendored)

@@ -14,7 +14,7 @@ permissions:
   contents: read

 concurrency:
-  group: merge-conflict-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

 jobs:
87  .github/workflows/create-tag.yml (vendored, new file)

@@ -0,0 +1,87 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: Create tag

on:
  workflow_dispatch:

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  determine-tag-version:
    name: Determine tag version
    if: github.ref == 'refs/heads/main'
    runs-on: ubuntu-24.04
    permissions:
      contents: read
    outputs:
      tag_version: ${{ steps.git-cliff.outputs.tag_version }}
    steps:
      - name: Checkout
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          fetch-depth: 0
          persist-credentials: false

      - name: Install git-cliff
        uses: taiki-e/install-action@cede0bb282aae847dfa8aacca3a41c86d973d4d7 # v2.68.1
        with:
          tool: git-cliff

      - name: Get tag version
        id: git-cliff
        run: |
          tag_version=$(git-cliff -c .github/cliff.toml --bumped-version --unreleased)
          echo "Next tag version is ${tag_version}"
          echo "tag_version=${tag_version}" >> "$GITHUB_OUTPUT"

  create-tag:
    name: Create tag
    if: github.ref == 'refs/heads/main'
    runs-on: ubuntu-24.04
    permissions:
      contents: write
    needs: determine-tag-version
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      TAG_VERSION: ${{ needs.determine-tag-version.outputs.tag_version }}
    steps:
      - name: Checkout
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          ssh-key: '${{ secrets.COMMIT_KEY }}'

      - name: Pnpm Setup
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

      - name: Set up Node.js
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version-file: 'package.json'
          # For workflows with elevated privileges we recommend disabling automatic caching.
          # https://github.com/actions/setup-node
          package-manager-cache: false

      - name: Configure git
        run: |
          git config --global user.name "${{ github.actor }}"
          git config --global user.email "${{ github.actor }}@users.noreply.github.com"

      - name: Bump package.json
        run: npm version ${TAG_VERSION} --no-commit-hooks --no-git-tag-version

      - name: Commit updated files
        run: |
          git add package.json
          git commit -m 'chore(release): prepare ${TAG_VERSION}'
          git push

      - name: Create git tag
        run: |
          git tag ${TAG_VERSION}
          git push origin ${TAG_VERSION}
2  .github/workflows/cypress.yml (vendored)

@@ -28,7 +28,7 @@ permissions:
   contents: read

 concurrency:
-  group: cypress-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

 jobs:
2  .github/workflows/docs-deploy.yml (vendored)

@@ -15,7 +15,7 @@ permissions:
   contents: read

 concurrency:
-  group: pages
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
2  .github/workflows/docs-link-check.yml (vendored)

@@ -25,7 +25,7 @@ permissions:
   contents: read

 concurrency:
-  group: docs-link-check-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

 jobs:
2  .github/workflows/helm.yml (vendored)

@@ -14,7 +14,7 @@ permissions:
   contents: read

 concurrency:
-  group: helm-charts
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
2  .github/workflows/lint-helm-charts.yml (vendored)

@@ -18,7 +18,7 @@ permissions:
   contents: read

 concurrency:
-  group: charts-lint-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

 jobs:
2  .github/workflows/preview.yml (vendored)

@@ -15,7 +15,7 @@ env:
   DOCKER_HUB: seerr/seerr

 concurrency:
-  group: preview-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
41  .github/workflows/release.yml (vendored)

@@ -11,7 +11,7 @@ permissions:
   contents: read

 concurrency:
-  group: release-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 env:
@@ -304,42 +304,3 @@ jobs:
         run: gh release edit "${{ env.VERSION }}" --draft=false --repo "${{ github.repository }}"
         env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-  discord:
-    name: Send Discord Notification
-    needs: publish-release
-    if: always()
-    runs-on: ubuntu-24.04
-    steps:
-      - name: Determine status
-        id: status
-        run: |
-          case "${{ needs.publish-release.result }}" in
-            success) echo "status=Success" >> $GITHUB_OUTPUT; echo "colour=3066993" >> $GITHUB_OUTPUT ;;
-            failure) echo "status=Failure" >> $GITHUB_OUTPUT; echo "colour=15158332" >> $GITHUB_OUTPUT ;;
-            cancelled) echo "status=Cancelled" >> $GITHUB_OUTPUT; echo "colour=10181046" >> $GITHUB_OUTPUT ;;
-            *) echo "status=Skipped" >> $GITHUB_OUTPUT; echo "colour=9807270" >> $GITHUB_OUTPUT ;;
-          esac
-
-      - name: Send notification
-        run: |
-          WEBHOOK="${{ secrets.DISCORD_WEBHOOK }}"
-
-          PAYLOAD=$(cat <<EOF
-          {
-            "embeds": [{
-              "title": "${{ steps.status.outputs.status }}: ${{ github.workflow }}",
-              "color": ${{ steps.status.outputs.colour }},
-              "fields": [
-                { "name": "Repository", "value": "[${{ github.repository }}](${{ github.server_url }}/${{ github.repository }})", "inline": true },
-                { "name": "Ref", "value": "${{ github.ref }}", "inline": true },
-                { "name": "Event", "value": "${{ github.event_name }}", "inline": true },
-                { "name": "Triggered by", "value": "${{ github.actor }}", "inline": true },
-                { "name": "Workflow", "value": "[${{ github.workflow }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})", "inline": true }
-              ]
-            }]
-          }
-          EOF
-          )
-
-          curl -sS -H "Content-Type: application/json" -X POST -d "$PAYLOAD" "$WEBHOOK" || true
@@ -12,7 +12,7 @@ on:

 permissions: {}

 concurrency:
-  group: renovate-helm-hooks-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

 jobs:
28  .github/workflows/semantic-pr.yml (vendored, new file)

@@ -0,0 +1,28 @@
name: "Semantic PR"

on:
  pull_request_target:
    types:
      - opened
      - reopened
      - edited
      - synchronize

permissions: {}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  main:
    name: Validate PR Title
    runs-on: ubuntu-slim
    permissions:
      contents: read
      pull-requests: read
      checks: write
    steps:
      - uses: amannn/action-semantic-pull-request@48f256284bd46cdaab1048c3721360e808335d50 # v6.1.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
2  .github/workflows/stale.yml (vendored)

@@ -9,7 +9,7 @@ on:

 permissions: {}

 concurrency:
-  group: close-stale-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
2  .github/workflows/test-docs-deploy.yml (vendored)

@@ -14,7 +14,7 @@ permissions:
   contents: read

 concurrency:
-  group: docs-pr-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

 jobs:
2  .github/workflows/trivy-scan.yml (vendored)

@@ -16,7 +16,7 @@ permissions:
   contents: read

 concurrency:
-  group: trivy-scan-${{ github.ref }}
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
149  CHANNELS_DVR_INTEGRATION.md (new file)

@@ -0,0 +1,149 @@
# Channels DVR Integration for Seerr

**Status:** Phase 1 Complete (Core Integration)
**Date:** 2026-02-20
**Implemented by:** Synapse (Opus → Sonnet)

## Overview

Added Channels DVR as a 4th media server backend to Seerr (alongside Jellyfin, Plex, Emby).

## What Was Implemented

### 1. Media Server Type Enum (`server/constants/server.ts`)

- Added `CHANNELS_DVR = 4` to `MediaServerType` enum

### 2. API Client (`server/api/channelsdvr.ts`)

- Full REST API client for Channels DVR
- Methods:
  - `getShows()` - List all TV shows
  - `getShow(id)` - Get specific show
  - `getShowEpisodes(id)` - Get episodes for a show
  - `getMovies()` - List all movies
  - `getMovie(id)` - Get specific movie
  - `testConnection()` - Connectivity test
- TypeScript interfaces for all API responses
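A minimal usage sketch of this client, using only the class and method names that appear in `server/api/channelsdvr.ts` later in this diff; the server URL is just an example value, and this snippet is illustrative rather than part of the actual scanner code:

```typescript
import ChannelsDVRAPI from '@server/api/channelsdvr';

// Point the client at a Channels DVR server (no API key is needed).
const channelsDvr = new ChannelsDVRAPI('http://192.168.0.15:8089');

async function listLibrary(): Promise<void> {
  // Bail out early if the server is unreachable.
  if (!(await channelsDvr.testConnection())) {
    throw new Error('Channels DVR server is not reachable');
  }

  const shows = await channelsDvr.getShows();
  const movies = await channelsDvr.getMovies();

  for (const show of shows) {
    console.log(
      `${show.name} (${show.release_year}), ${show.episode_count} episodes`
    );
  }
  console.log(`${movies.length} movies found`);
}
```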
### 3. Library Scanner (`server/lib/scanners/channelsdvr/index.ts`)

- Scans Channels DVR library and maps to Seerr
- **Key feature:** TMDb ID lookup by title/year search
- Processes movies and TV shows
- Handles episode/season grouping
- Tracks processing status

### 4. Settings Integration (`server/lib/settings/index.ts`)

- Added `ChannelsDVRSettings` interface
- Added to `AllSettings` with default initialization
- Configuration fields:
  - `name`: Display name
  - `url`: Channels DVR server URL (e.g., http://192.168.0.15:8089)
  - `libraries`: Library configuration array
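The settings file itself is not included in this diff, so the exact shape is not shown here. A sketch consistent with the fields listed above might look like the following; the `Library` sub-shape and the default values are assumptions based on how Seerr configures its other media servers:

```typescript
// Hypothetical sketch of the settings described above (not the actual source).
interface Library {
  id: string;
  name: string;
  enabled: boolean;
}

export interface ChannelsDVRSettings {
  name: string; // Display name shown in the UI
  url: string; // Channels DVR server URL, e.g. http://192.168.0.15:8089
  libraries: Library[];
}

// Default initialization added to AllSettings (values are illustrative).
const defaultChannelsDVRSettings: ChannelsDVRSettings = {
  name: '',
  url: '',
  libraries: [],
};
```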
## How It Works

1. **User configures Channels DVR URL** in Seerr settings
2. **Scanner connects** via REST API (no auth needed!)
3. **Fetches all content** (movies + TV shows)
4. **Maps to TMDb** by searching title + year
5. **Processes into Seerr database** for request management

## Key Design Decisions

### Why TMDb Search Instead of Direct IDs?

- Channels DVR doesn't provide TMDb/IMDb IDs in its API
- It uses `program_id` (Gracenote/TMS identifiers) instead
- Solution: search TMDb by title + release year
- The first result is used (good enough for most cases)
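As a concrete illustration of this lookup, a scanner could resolve a Channels DVR title to a TMDb ID roughly as follows. The scanner file itself is not included in this diff, so the helper below is only a sketch against TMDb's public v3 search endpoint; `TMDB_API_KEY` is assumed to come from configuration:

```typescript
// Sketch of the title + year lookup described above (not the actual scanner code).
const TMDB_API_KEY = process.env.TMDB_API_KEY ?? ''; // assumed configuration value

interface TmdbSearchResponse {
  results: { id: number; title: string; release_date?: string }[];
}

async function findTmdbMovieId(
  title: string,
  releaseYear?: number
): Promise<number | null> {
  const params = new URLSearchParams({
    api_key: TMDB_API_KEY,
    query: title,
  });
  if (releaseYear) {
    params.set('year', String(releaseYear));
  }

  const response = await fetch(
    `https://api.themoviedb.org/3/search/movie?${params.toString()}`
  );
  if (!response.ok) {
    return null;
  }

  const data = (await response.json()) as TmdbSearchResponse;
  // Take the first result, as described above; obscure titles may mismatch.
  return data.results[0]?.id ?? null;
}
```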
### Why No Authentication?

- Channels DVR API has no auth (local network only)
- Simplifies implementation
- Security via network isolation

### Why Simplified Scanner?

- Channels DVR doesn't expose resolution info via its API
- Defaults all content to non-4K
- Future enhancement: parse video files for resolution

## What's NOT Done (Phase 2 & 3)

### Phase 2: UI Integration (TODO)

- [ ] Settings page for Channels DVR URL configuration
- [ ] Server connection test button
- [ ] Library selection UI
- [ ] Server type selector (Jellyfin/Plex/Emby/Channels DVR)

### Phase 3: Testing & Polish (TODO)

- [ ] Test with a real Channels DVR instance (http://192.168.0.15:8089)
- [ ] Handle edge cases:
  - Shows/movies not found on TMDb
  - Network errors
  - Invalid URLs
- [ ] Add proper error messages
- [ ] Document configuration for users
- [ ] Consider PR to upstream Seerr project

## Testing Instructions

### Prerequisites

1. Channels DVR server running (http://192.168.0.15:8089)
2. Seerr development environment set up
3. Node.js + pnpm installed

### Manual Testing Steps

```bash
# 1. Install dependencies
cd /home/node/.openclaw/workspace/seerr-explore
pnpm install

# 2. Build the project
pnpm build

# 3. Start Seerr
pnpm start

# 4. Configure via UI:
#    - Go to Settings → Channels DVR
#    - Enter URL: http://192.168.0.15:8089
#    - Save

# 5. Trigger scan:
#    - Settings → Library Sync → Scan Channels DVR
```

### API Testing (Without Full Seerr)

```bash
# Test the Channels DVR API directly
curl http://192.168.0.15:8089/api/v1/shows | jq '.[0]'
curl http://192.168.0.15:8089/api/v1/movies | jq '.[0]'
```

## Files Changed

- `server/constants/server.ts` - Added enum value
- `server/api/channelsdvr.ts` - New API client
- `server/lib/scanners/channelsdvr/index.ts` - New scanner
- `server/lib/settings/index.ts` - Added settings interface

## Next Steps

1. **Commit changes** to git
2. **Test with a real Channels DVR** instance
3. **Build UI** for configuration (Phase 2)
4. **Polish & document** (Phase 3)
5. **Consider upstream PR** to the Seerr project

## Notes

- Used Opus for the architecture/planning phase
- Downgraded to Sonnet for implementation details
- Code follows existing Seerr patterns (Jellyfin scanner as reference)
- TypeScript types are complete and match the Channels DVR API
- Ready for testing with a real instance

## Resources

- Channels DVR API Docs: https://getchannels.com/docs/server-api/introduction/
- Channels DVR Instance: http://192.168.0.15:8089
- Seerr GitHub: https://github.com/seerr-team/seerr
- Our Fork: https://git.bytesnap.io/ByteSnap/channels-seerr
@@ -6,6 +6,12 @@ All help is welcome and greatly appreciated! If you would like to contribute to

> [!IMPORTANT]
>
> Automated AI-generated contributions without human review are not allowed and will be rejected.
> This is an open-source project maintained by volunteers.
> We do not have the resources to review pull requests that could have been avoided with proper human oversight.
> While we have no issue with contributors using AI tools as an aid, it is your responsibility as a contributor to ensure that all submissions are carefully reviewed and meet our quality standards.
> Submissions that appear to be unreviewed AI output will be considered low-effort and may result in a ban.
>
> If you are using **any kind of AI assistance** to contribute to Seerr,
> it must be disclosed in the pull request.

@@ -122,7 +128,7 @@ Steps:

 - If you are taking on an existing bug or feature ticket, please comment on the [issue](/../../issues) to avoid multiple people working on the same thing.
 - All commits **must** follow [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/)
-  - Pull requests with commits not following this standard will **not** be merged.
+  - Pull requests with titles or commits not following this standard will **not** be merged. PR titles are automatically checked for compliance.
 - Please make meaningful commits, or squash them prior to opening a pull request.
 - Do not squash commits once people have begun reviewing your changes.
 - Always rebase your commit to the latest `develop` branch. Do **not** merge `develop` into your branch.
62  README-TESTING.md (new file)

@@ -0,0 +1,62 @@
# Testing Channels-Seerr with Docker

## Quick Start

1. **Build and run:**
   ```bash
   docker-compose -f docker-compose.test.yml up --build
   ```

2. **Access the web UI:**
   - Open browser: http://localhost:5055
   - Complete the setup wizard
   - Add your Channels DVR server in Settings

3. **Stop:**
   ```bash
   docker-compose -f docker-compose.test.yml down
   ```

## Configuration

- **Config directory:** `./config` (created automatically, persists settings)
- **Logs:** `docker-compose logs -f seerr`
- **Port:** Default 5055 (change in docker-compose.test.yml if needed)

## Testing Channels DVR Integration

1. Start the Seerr container
2. Navigate to Settings → Channels DVR
3. Add your Channels DVR server:
   - **Server URL:** http://your-channels-server:8089
   - **Test connection** to verify
4. Enable sync jobs (manual or scheduled)
5. Check logs for sync activity:
   ```bash
   docker-compose -f docker-compose.test.yml logs -f seerr | grep -i channels
   ```

## Development Testing

For faster iteration without full rebuilds:

```bash
# Use Dockerfile.local for development
docker build -f Dockerfile.local -t channels-seerr:dev .
docker run -p 5055:5055 -v ./config:/app/config channels-seerr:dev
```

## Troubleshooting

**Build fails:**
- Check the Node.js version (requires 22.x)
- Try: `docker-compose -f docker-compose.test.yml build --no-cache`

**Can't connect to Channels DVR:**
- If Channels is on the host machine: use `http://host.docker.internal:8089`
- If on a tailnet: use the Tailscale IP
- Check that the firewall allows connections from the Docker network

**Database issues:**
- SQLite (default): stored in `./config/db/db.sqlite3`
- To use Postgres: uncomment the postgres service in docker-compose.test.yml
@@ -3,9 +3,9 @@ kubeVersion: '>=1.23.0-0'
 name: seerr-chart
 description: Seerr helm chart for Kubernetes
 type: application
-version: 3.0.0
+version: 3.1.0
 # renovate: image=ghcr.io/seerr-team/seerr
-appVersion: '3.0.0'
+appVersion: 'v3.0.1'
 maintainers:
   - name: Seerr Team
     url: https://github.com/orgs/seerr-team/people
@@ -1,6 +1,6 @@
 # seerr-chart

 Seerr helm chart for Kubernetes

@@ -44,9 +44,10 @@ If `replicaCount` value was used - remove it. Helm update should work fine after

 | Key | Type | Default | Description |
 |-----|------|---------|-------------|
 | affinity | object | `{}` | |
-| config | object | `{"persistence":{"accessModes":["ReadWriteOnce"],"annotations":{},"name":"","size":"5Gi","volumeName":""}}` | Creating PVC to store configuration |
+| config | object | `{"persistence":{"accessModes":["ReadWriteOnce"],"annotations":{},"existingClaim":"","name":"","size":"5Gi","volumeName":""}}` | Creating PVC to store configuration |
 | config.persistence.accessModes | list | `["ReadWriteOnce"]` | Access modes of persistent disk |
 | config.persistence.annotations | object | `{}` | Annotations for PVCs |
+| config.persistence.existingClaim | string | `""` | Specify an existing `PersistentVolumeClaim` to use. If this value is provided, the default PVC will not be created |
 | config.persistence.name | string | `""` | Config name |
 | config.persistence.size | string | `"5Gi"` | Size of persistent disk |
 | config.persistence.volumeName | string | `""` | Name of the permanent volume to reference in the claim. Can be used to bind to existing volumes. |
@@ -1,3 +1,4 @@
+{{- if not .Values.config.persistence.existingClaim -}}
 apiVersion: v1
 kind: PersistentVolumeClaim
 metadata:
@@ -22,3 +23,4 @@ spec:
   resources:
     requests:
      storage: "{{ .Values.config.persistence.size }}"
+{{- end -}}
@@ -103,7 +103,7 @@ spec:
       volumes:
         - name: config
           persistentVolumeClaim:
-            claimName: {{ include "seerr.configPersistenceName" . }}
+            claimName: {{ if .Values.config.persistence.existingClaim }}{{ .Values.config.persistence.existingClaim }}{{- else }}{{ include "seerr.configPersistenceName" . }}{{- end }}
       {{- with .Values.volumes }}
         {{- toYaml . | nindent 8 }}
       {{- end }}
@@ -86,6 +86,8 @@ config:
     # -- Name of the permanent volume to reference in the claim.
     # Can be used to bind to existing volumes.
     volumeName: ''
+    # -- Specify an existing `PersistentVolumeClaim` to use. If this value is provided, the default PVC will not be created
+    existingClaim: ''

 ingress:
   enabled: false
||||
35  docker-compose.test.yml (new file)

@@ -0,0 +1,35 @@
version: '3.8'

services:
  seerr:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        COMMIT_TAG: channels-dvr-test
    container_name: channels-seerr-test
    hostname: seerr
    ports:
      - "5055:5055"
    environment:
      - LOG_LEVEL=debug
      - TZ=America/Chicago
    volumes:
      - ./config:/app/config
    restart: unless-stopped

  # Optional: PostgreSQL for production-like testing
  # Uncomment if you want to test with Postgres instead of SQLite
  # postgres:
  #   image: postgres:15-alpine
  #   container_name: seerr-postgres
  #   environment:
  #     - POSTGRES_PASSWORD=seerr
  #     - POSTGRES_USER=seerr
  #     - POSTGRES_DB=seerr
  #   volumes:
  #     - postgres-data:/var/lib/postgresql/data
  #   restart: unless-stopped

# volumes:
#   postgres-data:
@@ -30,7 +30,7 @@ If your PostgreSQL server is configured to accept TCP connections, you can speci

 ```dotenv
 DB_TYPE=postgres # Which DB engine to use, either sqlite or postgres. The default is sqlite.
-DB_HOST="localhost" # (optional) The host (URL) of the database. The default is "localhost".
+DB_HOST=localhost # (optional) The host (URL) of the database. The default is "localhost".
 DB_PORT="5432" # (optional) The port to connect to. The default is "5432".
 DB_USER= # (required) Username used to connect to the database.
 DB_PASS= # (required) Password of the user used to connect to the database.
||||
111  docs/getting-started/third-parties/synology.mdx (new file)

@@ -0,0 +1,111 @@
---
title: Synology (Advanced)
description: Install Seerr on Synology NAS using SynoCommunity
sidebar_position: 5
---

# Synology

:::warning
Third-party installation methods are maintained by the community. The Seerr team is not responsible for these packages.
:::

:::warning
This method is not recommended for most users. It is intended for advanced users who are using a Synology NAS.
:::

## Prerequisites

- Synology NAS running **DSM 7.2** or later
- 64-bit architecture (x86_64 or ARMv8)
- [SynoCommunity package source](https://synocommunity.com/) added to Package Center

## Adding the SynoCommunity Package Source

If you haven't already added SynoCommunity to your Package Center:

1. Open **Package Center** in DSM
2. Click **Settings** in the top-right corner
3. Go to the **Package Sources** tab
4. Click **Add**
5. Enter the following:
   - **Name**: `SynoCommunity`
   - **Location**: `https://packages.synocommunity.com`
6. Click **OK**

## Installation

1. In **Package Center**, search for **Seerr**
2. Click **Install**
3. Follow the installation wizard prompts
4. Package Center will automatically install any required dependencies (Node.js v22)

### Access Seerr

Once installed, access Seerr at:

```
http://<your-synology-ip>:5055
```

You can also click the **Open** button in Package Center or find Seerr in the DSM main menu.

## Configuration

Seerr's configuration files are stored at:

```
/var/packages/seerr/var/config
```

:::info
The Seerr package runs as a dedicated service user managed by DSM. No manual permission configuration is required.
:::

## Managing the Service

You can start, stop, and restart Seerr from **Package Center** → find Seerr → use the action buttons.

## Updating

When a new version is available:

1. Open **Package Center**
2. Go to **Installed** packages
3. Find **Seerr** and click **Update** if available

:::tip
Enable automatic updates in Package Center settings to keep Seerr up to date.
:::

## Troubleshooting

### Viewing Logs

Seerr logs are located at `/var/packages/seerr/var/config/logs` and can be accessed using:

- the **File Browser** package (recommended for most users)
- SSH (advanced users)

### Port Conflicts

Seerr uses port 5055. If this port is already in use:

- **Docker containers**: Remap the conflicting container to a different port
- **Other packages**: The conflicting package will need to be uninstalled, as Seerr's port cannot be changed

SynoCommunity ensures there are no port conflicts with other SynoCommunity packages or official Synology packages.

### Package Won't Start

Ensure Node.js v22 is installed and running by checking its status in **Package Center**.

## Uninstallation

1. Open **Package Center**
2. Find **Seerr** in your installed packages
3. Click **Uninstall**

:::caution
Uninstalling will remove the application but preserve your configuration data by default. Select "Remove data" during uninstallation if you want a complete removal.
:::
||||
@@ -4,12 +4,6 @@ description: Install Seerr using TrueNAS
sidebar_position: 4
---
# TrueNAS
:::danger
This method has not yet been updated for Seerr and is currently a work in progress.
You can follow the ongoing work on this issue https://github.com/truenas/apps/issues/3374.
:::

<!--
:::warning
Third-party installation methods are maintained by the community. The Seerr team is not responsible for these packages.
:::
@@ -17,4 +11,7 @@ Third-party installation methods are maintained by the community. The Seerr team
:::warning
This method is not recommended for most users. It is intended for advanced users who are using TrueNAS distribution.
:::
-->

## Installation

Go to the 'Apps' menu, click the 'Discover Apps' button in the top right, search for 'Seerr' in the search bar, and install the app.
||||
@@ -5,12 +5,7 @@ sidebar_position: 3
---

# Unraid
:::danger
This method has not yet been updated for Seerr and is awaiting a community contribution.
Feel free to open a pull request on GitHub to update this installation method.
:::

<!--
:::warning
Third-party installation methods are maintained by the community. The Seerr team is not responsible for these packages.
:::
@@ -19,9 +14,76 @@ Third-party installation methods are maintained by the community. The Seerr team
This method is not recommended for most users. It is intended for advanced users who are using Unraid.
:::

1. Ensure you have the **Community Applications** plugin installed.
2. Inside the **Community Applications** app store, search for **Seerr**.
3. Click the **Install Button**.
4. On the following **Add Container** screen, make changes to the **Host Port** and **Host Path 1** \(Appdata\) as needed.
5. Click apply and access "Seerr" at your `<ServerIP:HostPort>` in a web browser.
-->

If an official Unraid Community Applications template for Seerr isn't available in your catalog, you can install Seerr manually using Unraid's Docker UI.

## Fresh Installation

### 1. Create the config directory

:::note
Seerr is now rootless. Unraid typically runs Docker containers as `nobody:users` (UID 99, GID 100), but Seerr now runs internally as UID 1000, GID 1000. This creates a permission mismatch.
:::

:::info
**If migrating**: Copy your existing Jellyseerr/Overseerr config files (e.g., from `/mnt/user/appdata/overseerr/` or `/mnt/user/appdata/jellyseerr`) to `/mnt/user/appdata/seerr`, then apply the permissions below.
:::

Open the Unraid terminal and run:

```bash
mkdir -p /mnt/user/appdata/seerr
chown -R 1000:1000 /mnt/user/appdata/seerr
```

### 2. Add the Docker container

Navigate to the **Docker** tab in Unraid and click **Add Container**. Fill in the following:

| Field | Value |
|---|---|
| **Name** | `seerr` |
| **Repository** | `ghcr.io/seerr-team/seerr:latest` |
| **Registry URL** (optional) | `https://ghcr.io` |
| **Icon URL** | `https://raw.githubusercontent.com/seerr-team/seerr/develop/public/android-chrome-512x512.png` |
| **WebUI** | `http://[IP]:[PORT:5055]` |
| **Extra Parameters** | `--init` |
| **Network Type** | `bridge` |
| **Privileged** | `Off` |

Then click **Add another Path, Port, Variable** to add:

**Port:**
| Field | Value |
|---|---|
| Container Port | `5055` |
| Host Port | `5055` |
| Connection Type | `TCP` |

**Path:**
| Field | Value |
|---|---|
| Container Path | `/app/config` |
| Host Path | `/mnt/user/appdata/seerr` |

**Variable:**
| Field | Value |
|---|---|
| Key | `TZ` |
| Value | Your [TZ database name](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) (e.g., `America/New_York`) |

**Variable (optional):**
| Field | Value |
|---|---|
| Key | `LOG_LEVEL` |
| Value | `info` |

Click **Apply** to create and start the container.

### 3. Access Seerr

Open the WebUI at `http://<your-unraid-ip>:5055` and follow the setup wizard.

:::info
The `--init` flag in **Extra Parameters** is required. Seerr does not include its own init process, so `--init` ensures proper signal handling and clean container shutdowns.
:::
||||
@@ -23,7 +23,6 @@ Installation methods are now divided into two categories: official and third-par
 The Seerr team is only responsible for official installation methods, while third-party methods are maintained by the community.
 Some methods are currently not maintained, but this does not mean they are permanently discontinued. The community may restore and support them if they choose to do so.

-- **Unraid app:** Not maintained
 - **Snap package:** Not maintained
 :::
||||
@@ -211,4 +210,106 @@ See https://aur.archlinux.org/packages/seerr
|
||||
|
||||
### TrueNAS
|
||||
|
||||
Waiting for https://github.com/truenas/apps/issues/3374
|
||||
Refer to [Seerr TrueNAS Documentation](/getting-started/third-parties/truenas), all of our examples have been updated to reflect the below change.
|
||||
|
||||
<Tabs groupId="truenas-migration" queryString>
|
||||
<TabItem value="hostpath" label="Host Path">
|
||||
**This guide describes how to migrate from Host Path storage (not ixVolume).**
|
||||
1. Stop Jellyseerr/Overseerr
|
||||
2. Install Seerr and use the same Host Path storage that was used by Jellyseerr/Overseerr
|
||||
3. Start Seerr app
|
||||
4. Delete Jellyseerr/Overseerr app
|
||||
</TabItem>
|
||||
<TabItem value="ixvolume" label="ixVolume">
|
||||
**This guide describes how to migrate from ixVolume storage (not Host Path).**
|
||||
1. Stop Jellyseerr/Overseerr
|
||||
2. Create a dataset for Seerr
|
||||
If your apps normally store data under something like:
|
||||
```
|
||||
/mnt/storage/<app-name>
|
||||
```
|
||||
then create a dataset named:
|
||||
```
|
||||
storage/seerr
|
||||
```
|
||||
resulting in:
|
||||
```
|
||||
/mnt/storage/seerr
|
||||
```
|
||||
3. Copy ixVolume Data
|
||||
Open System Settings → Shell, or SSH into your TrueNAS server as root and run :
|
||||
```bash
|
||||
rsync -av /mnt/.ix-apps/app_mounts/jellyseerr/ /mnt/storage/seerr/
|
||||
```
|
||||
4. Install Seerr and use the same Host Path storage that was created before (`/mnt/storage/seerr/config` in our example)
|
||||
5. Start Seerr app
|
||||
6. Delete Jellyseerr/Overseerr app
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
### Unraid
|
||||
|
||||
Refer to [Seerr Unraid Documentation](/getting-started/third-parties/unraid), all of our examples have been updated to reflect the below change.
|
||||
|
||||
Seerr will automatically migrate your existing Overseerr or Jellyseerr data on first startup. No manual database migration is needed.
|
||||
|
||||
1. Stop and remove the old Overseerr (or Jellyseerr) container from the Unraid **Docker** tab. Click the container icon, then **Stop**, then **Remove**. **⚠️ Do not delete the appdata folder ⚠️**
|
||||
|
||||
2. Back up your existing appdata folder:
|
||||
```bash
|
||||
cp -a /mnt/user/appdata/overseerr /mnt/user/appdata/overseerr-backup
|
||||
```
|
||||
|
||||
3. Fix config folder permissions — Seerr runs as the `node` user (UID 1000) instead of root:
|
||||
```bash
|
||||
chown -R 1000:1000 /mnt/user/appdata/overseerr
|
||||
```
|
||||
For Jellyseerr users, replace `overseerr` with `jellyseerr` in the path above.
|
||||
|
||||
4. Add a new container in the Unraid **Docker** tab. Click **Add Container** and fill in the following:
|
||||
|
||||
| Field | Value |
|
||||
|---|---|
|
||||
| **Name** | `seerr` |
|
||||
| **Repository** | `ghcr.io/seerr-team/seerr:latest` |
|
||||
| **Registry URL** (optional) | `https://ghcr.io` |
|
||||
| **Icon URL** | `https://raw.githubusercontent.com/seerr-team/seerr/develop/public/android-chrome-512x512.png` |
|
||||
| **WebUI** | `http://[IP]:[PORT:5055]` |
|
||||
| **Extra Parameters** | `--init` |
|
||||
| **Network Type** | `bridge` |
|
||||
| **Privileged** | `Off` |
|
||||
|
||||
Then click **Add another Path, Port, Variable** to add:
|
||||
|
||||
**Port:**
|
||||
| Field | Value |
|
||||
|---|---|
|
||||
| Container Port | `5055` |
|
||||
| Host Port | `5055` |
|
||||
| Connection Type | `TCP` |
|
||||
|
||||
**Path** — point this to your existing config folder:
|
||||
| Field | Value |
|
||||
|---|---|
|
||||
| Container Path | `/app/config` |
|
||||
| Host Path | `/mnt/user/appdata/overseerr` |
|
||||
|
||||
For Jellyseerr users, use `/mnt/user/appdata/jellyseerr`.
|
||||
|
||||
**Variable:**
|
||||
| Field | Value |
|
||||
|---|---|
|
||||
| Key | `TZ` |
|
||||
| Value | Your [TZ database name](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) (e.g., `America/New_York`) |
|
||||
|
||||
**Variable (optional):**
|
||||
| Field | Value |
|
||||
|---|---|
|
||||
| Key | `LOG_LEVEL` |
|
||||
| Value | `info` |
|
||||
|
||||
5. Click **Apply** to start the container. Check the container logs to confirm the automatic migration completed successfully.
|
||||
|
||||
:::tip
|
||||
If you are using a reverse proxy (such as SWAG or Nginx Proxy Manager), update your proxy configuration to point to the new container name `seerr`. The default port remains `5055`.
|
||||
:::
|
||||
|
||||
@@ -6,18 +6,22 @@ sidebar_position: 2

# Web Push

The web push notification agent enables you and your users to receive Seerr notifications in a supported browser.

This notification agent does not require any configuration, but is not enabled in Seerr

:::warning
Web push notifications require a secure connection to your Seerr instance. Refer to the [Reverse Proxy](/extending-seerr/reverse-proxy) documentation for more information.
:::

To set up web push notifications, simply enable the agent in **Settings → Notifications → Web Push**. You and your users will then be prompted to allow notifications in your web browser.
The web push notification agent enables you and your users to receive Seerr notifications in a supported browser. This offers a native notification experience without the need to install an app.

Users can opt out of these notifications, or customize the notification types they would like to subscribe to, in their user settings.
This notification agent does not require any configuration, but is not enabled by default in Seerr.

:::info
Web push notifications offer a native notification experience without the need to install an app.
To set up web push notifications, simply enable the agent in **Settings → Notifications → Web Push**.

You and your users have the option to enable web push notifications by going to your **User Profile → Edit Settings → Notifications → Web Push → Enable web push**. Here you can also customize the notifications you'd like to receive.

:::info[Mobile Users]
For Web Push notifications to work on mobile you need to add Seerr to your home screen as progressive web app (PWA).
:::

:::info[iOS Users]
On iOS you may need to enable the Safari notifications feature flag by going to **Settings → Safari → Advanced → Feature Flags** and enabling "Notifications".
:::
@@ -19,7 +19,7 @@ Please check how to migrate to Seerr in our [migration guide](https://docs.seerr

 Seerr brings several features that were previously available in Jellyseerr but missing from Overseerr. These additions improve flexibility, performance, and overall control for admins and power users:

-* **Alternative media solution:** Added support for Jellyfin and Emby in addition to the existing Plex integration.
+* **Alternative media solution:** Added support for Jellyfin and Emby as alternatives to Plex. Only one integration can be used at a time.
 * **PostgreSQL support**: In addition to SQLite, you can now opt in to using a PostgreSQL database.
 * **Blocklist for movies, series, and tags**: Allows permitted users to hide movies, series, or tags from regular users.
 * **Override rules**: Adjust default request settings based on conditions such as user, tag, or other criteria.
@@ -16,7 +16,12 @@ const config: Config = {
   deploymentBranch: 'gh-pages',

   onBrokenLinks: 'throw',
-  onBrokenMarkdownLinks: 'warn',
+
+  markdown: {
+    hooks: {
+      onBrokenMarkdownLinks: 'warn',
+    },
+  },

   i18n: {
     defaultLocale: 'en',
220  server/api/channelsdvr.ts (new file)

@@ -0,0 +1,220 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import ExternalAPI from '@server/api/externalapi';
import { getAppVersion } from '@server/utils/appVersion';
import logger from '@server/logger';

export interface ChannelsDVRShow {
  id: string;
  name: string;
  summary: string;
  image_url: string;
  release_year: number;
  release_date: string;
  genres: string[];
  categories: string[];
  labels: string[];
  cast: string[];
  episode_count: number;
  number_unwatched: number;
  favorited: boolean;
  last_watched_at?: number;
  last_recorded_at?: number;
  created_at: number;
  updated_at: number;
}

export interface ChannelsDVRMovie {
  id: string;
  program_id: string;
  path: string;
  channel: string;
  title: string;
  summary: string;
  full_summary: string;
  content_rating: string;
  image_url: string;
  thumbnail_url: string;
  duration: number;
  playback_time: number;
  release_year: number;
  release_date: string;
  genres: string[];
  tags: string[];
  labels: string[];
  categories: string[];
  cast: string[];
  directors: string[];
  watched: boolean;
  favorited: boolean;
  delayed: boolean;
  cancelled: boolean;
  corrupted: boolean;
  completed: boolean;
  processed: boolean;
  verified: boolean;
  last_watched_at?: number;
  created_at: number;
  updated_at: number;
}

export interface ChannelsDVREpisode {
  id: string;
  show_id: string;
  program_id: string;
  path: string;
  channel: string;
  season_number: number;
  episode_number: number;
  title: string;
  episode_title: string;
  summary: string;
  full_summary: string;
  content_rating: string;
  image_url: string;
  thumbnail_url: string;
  duration: number;
  playback_time: number;
  original_air_date: string;
  genres: string[];
  tags: string[];
  categories: string[];
  cast: string[];
  commercials: number[];
  watched: boolean;
  favorited: boolean;
  delayed: boolean;
  cancelled: boolean;
  corrupted: boolean;
  completed: boolean;
  processed: boolean;
  locked: boolean;
  verified: boolean;
  created_at: number;
  updated_at: number;
}

class ChannelsDVRAPI extends ExternalAPI {
  constructor(baseUrl: string) {
    super(
      baseUrl,
      {},
      {
        headers: {
          'User-Agent': `Seerr/${getAppVersion()}`,
        },
      }
    );
  }

  /**
   * Get all TV shows from Channels DVR library
   */
  public async getShows(): Promise<ChannelsDVRShow[]> {
    try {
      const data = await this.get<ChannelsDVRShow[]>('/api/v1/shows');
      return data;
    } catch (e) {
      logger.error('Failed to fetch shows from Channels DVR', {
        label: 'Channels DVR API',
        errorMessage: e.message,
      });
      throw new Error('Failed to fetch shows from Channels DVR');
    }
  }

  /**
   * Get a specific show by ID
   */
  public async getShow(showId: string): Promise<ChannelsDVRShow> {
    try {
      const data = await this.get<ChannelsDVRShow>(`/api/v1/shows/${showId}`);
      return data;
    } catch (e) {
      logger.error(`Failed to fetch show ${showId} from Channels DVR`, {
        label: 'Channels DVR API',
        errorMessage: e.message,
      });
      throw new Error(`Failed to fetch show ${showId} from Channels DVR`);
    }
  }

  /**
   * Get all episodes for a specific show
   */
  public async getShowEpisodes(showId: string): Promise<ChannelsDVREpisode[]> {
    try {
      const data = await this.get<ChannelsDVREpisode[]>(
        `/api/v1/shows/${showId}/episodes`
      );
      return data;
    } catch (e) {
      logger.error(
        `Failed to fetch episodes for show ${showId} from Channels DVR`,
        {
          label: 'Channels DVR API',
          errorMessage: e.message,
        }
      );
      throw new Error(
        `Failed to fetch episodes for show ${showId} from Channels DVR`
      );
    }
  }

  /**
   * Get all movies from Channels DVR library
   */
  public async getMovies(): Promise<ChannelsDVRMovie[]> {
    try {
      const data = await this.get<ChannelsDVRMovie[]>('/api/v1/movies');
      return data;
    } catch (e) {
      logger.error('Failed to fetch movies from Channels DVR', {
        label: 'Channels DVR API',
        errorMessage: e.message,
      });
      throw new Error('Failed to fetch movies from Channels DVR');
    }
  }

  /**
   * Get a specific movie by ID
   */
  public async getMovie(movieId: string): Promise<ChannelsDVRMovie> {
    try {
      const data = await this.get<ChannelsDVRMovie>(`/api/v1/movies/${movieId}`);
      return data;
    } catch (e) {
      logger.error(`Failed to fetch movie ${movieId} from Channels DVR`, {
        label: 'Channels DVR API',
        errorMessage: e.message,
      });
      throw new Error(`Failed to fetch movie ${movieId} from Channels DVR`);
    }
  }

  /**
   * Test connectivity to Channels DVR server
   */
  public async testConnection(): Promise<boolean> {
    try {
      // Try to fetch shows list as a connectivity test
      await this.getShows();
      return true;
    } catch (e) {
      logger.error('Channels DVR connection test failed', {
        label: 'Channels DVR API',
        errorMessage: e.message,
      });
      return false;
    }
  }
}

export default ChannelsDVRAPI;
@@ -92,7 +92,7 @@ class ServarrBase<QueueItemAppendT> extends ExternalAPI {
     apiKey,
     cacheName,
     apiName,
-    timeout = 5000,
+    timeout = 10000,
   }: {
     url: string;
     apiKey: string;
@@ -2,6 +2,7 @@ export enum MediaServerType {
   PLEX = 1,
   JELLYFIN,
   EMBY,
+  CHANNELS_DVR,
   NOT_CONFIGURED,
 }
@@ -36,6 +36,7 @@ export class Blocklist implements BlocklistItem {
   @ManyToOne(() => User, (user) => user.id, {
     eager: true,
   })
+  @Index()
   user?: User;

   @OneToOne(() => Media, (media) => media.blocklist, {
@@ -5,6 +5,7 @@ import {
   AfterLoad,
   Column,
   Entity,
+  Index,
   ManyToOne,
   OneToMany,
   PrimaryGeneratedColumn,
@@ -19,6 +20,7 @@ class Issue {
   public id: number;

   @Column({ type: 'int' })
+  @Index()
   public issueType: IssueType;

   @Column({ type: 'int', default: IssueStatus.OPEN })
@@ -34,12 +36,14 @@ class Issue {
     eager: true,
     onDelete: 'CASCADE',
   })
+  @Index()
   public media: Media;

   @ManyToOne(() => User, (user) => user.createdIssues, {
     eager: true,
     onDelete: 'CASCADE',
   })
+  @Index()
   public createdBy: User;

   @ManyToOne(() => User, {
@@ -47,6 +51,7 @@ class Issue {
     onDelete: 'CASCADE',
     nullable: true,
   })
+  @Index()
   public modifiedBy?: User;

   @OneToMany(() => IssueComment, (comment) => comment.issue, {
@@ -1,5 +1,11 @@
 import { DbAwareColumn } from '@server/utils/DbColumnHelper';
-import { Column, Entity, ManyToOne, PrimaryGeneratedColumn } from 'typeorm';
+import {
+  Column,
+  Entity,
+  Index,
+  ManyToOne,
+  PrimaryGeneratedColumn,
+} from 'typeorm';
 import Issue from './Issue';
 import { User } from './User';

@@ -12,11 +18,13 @@ class IssueComment {
     eager: true,
     onDelete: 'CASCADE',
   })
+  @Index()
   public user: User;

   @ManyToOne(() => Issue, (issue) => issue.comments, {
     onDelete: 'CASCADE',
   })
+  @Index()
   public issue: Issue;

   @Column({ type: 'text' })
@@ -206,6 +206,19 @@ class Media {
    Object.assign(this, init);
  }

  public resetServiceData(): void {
    this.serviceId = null;
    this.serviceId4k = null;
    this.externalServiceId = null;
    this.externalServiceId4k = null;
    this.externalServiceSlug = null;
    this.externalServiceSlug4k = null;
    this.ratingKey = null;
    this.ratingKey4k = null;
    this.jellyfinMediaId = null;
    this.jellyfinMediaId4k = null;
  }

  @AfterLoad()
  public setPlexUrls(): void {
    const { machineId, webAppUrl } = getSettings().plex;
@@ -521,12 +521,14 @@ export class MediaRequest {
    eager: true,
    onDelete: 'CASCADE',
  })
  @Index()
  public media: Media;

  @ManyToOne(() => User, (user) => user.requests, {
    eager: true,
    onDelete: 'CASCADE',
  })
  @Index()
  public requestedBy: User;

  @ManyToOne(() => User, {
@@ -535,6 +537,7 @@ export class MediaRequest {
    eager: true,
    onDelete: 'SET NULL',
  })
  @Index()
  public modifiedBy?: User;

  @DbAwareColumn({ type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
@@ -1,6 +1,12 @@
import { MediaStatus } from '@server/constants/media';
import { DbAwareColumn } from '@server/utils/DbColumnHelper';
import { Column, Entity, ManyToOne, PrimaryGeneratedColumn } from 'typeorm';
import {
  Column,
  Entity,
  Index,
  ManyToOne,
  PrimaryGeneratedColumn,
} from 'typeorm';
import Media from './Media';

@Entity()
@@ -20,6 +26,7 @@ class Season {
  @ManyToOne(() => Media, (media) => media.seasons, {
    onDelete: 'CASCADE',
  })
  @Index()
  public media: Promise<Media>;

  @DbAwareColumn({ type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
@@ -1,6 +1,12 @@
import { MediaRequestStatus } from '@server/constants/media';
import { DbAwareColumn } from '@server/utils/DbColumnHelper';
import { Column, Entity, ManyToOne, PrimaryGeneratedColumn } from 'typeorm';
import {
  Column,
  Entity,
  Index,
  ManyToOne,
  PrimaryGeneratedColumn,
} from 'typeorm';
import { MediaRequest } from './MediaRequest';

@Entity()
@@ -17,6 +23,7 @@ class SeasonRequest {
  @ManyToOne(() => MediaRequest, (request) => request.seasons, {
    onDelete: 'CASCADE',
  })
  @Index()
  public request: MediaRequest;

  @DbAwareColumn({ type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
@@ -2,6 +2,7 @@ import { DbAwareColumn } from '@server/utils/DbColumnHelper';
import {
  Column,
  Entity,
  Index,
  ManyToOne,
  PrimaryGeneratedColumn,
  Unique,
@@ -18,6 +19,7 @@ export class UserPushSubscription {
    eager: true,
    onDelete: 'CASCADE',
  })
  @Index()
  public user: User;

  @Column()
@@ -47,12 +47,14 @@ export class Watchlist implements WatchlistItem {
    eager: true,
    onDelete: 'CASCADE',
  })
  @Index()
  public requestedBy: User;

  @ManyToOne(() => Media, (media) => media.watchlists, {
    eager: true,
    onDelete: 'CASCADE',
  })
  @Index()
  public media: Media;

  @DbAwareColumn({ type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
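The entity changes above all follow one pattern: each @ManyToOne relation gains an @Index() decorator so the generated foreign-key column is explicitly indexed. A minimal TypeORM sketch of that pattern (illustrative entity, not from this PR):

// @Index() on a relation property indexes the generated FK column (e.g. "userId"),
// which is what the migrations further down add for the existing tables.
import { Entity, Index, ManyToOne, PrimaryGeneratedColumn } from 'typeorm';
import { User } from './User';

@Entity()
class ExampleEntity {
  @PrimaryGeneratedColumn()
  public id: number;

  @ManyToOne(() => User, { eager: true, onDelete: 'CASCADE' })
  @Index()
  public user: User;
}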
305
server/lib/scanners/channelsdvr/index.ts
Normal file
@@ -0,0 +1,305 @@
import ChannelsDVRAPI, {
  type ChannelsDVRMovie,
  type ChannelsDVRShow,
} from '@server/api/channelsdvr';
import TheMovieDb from '@server/api/themoviedb';
import { MediaServerType } from '@server/constants/server';
import BaseScanner from '@server/lib/scanners/baseScanner';
import type {
  ProcessableSeason,
  RunnableScanner,
  StatusBase,
} from '@server/lib/scanners/baseScanner';
import type { Library } from '@server/lib/settings';
import { getSettings } from '@server/lib/settings';
import logger from '@server/logger';

interface ChannelsDVRSyncStatus extends StatusBase {
  currentLibrary?: Library;
  libraries: Library[];
}

class ChannelsDVRScanner
  extends BaseScanner<ChannelsDVRMovie | ChannelsDVRShow>
  implements RunnableScanner<ChannelsDVRSyncStatus>
{
  private channelsClient: ChannelsDVRAPI;
  private libraries: Library[];
  private currentLibrary?: Library;
  private isRecentOnly = false;

  constructor({ isRecentOnly }: { isRecentOnly?: boolean } = {}) {
    super('Channels DVR Sync');
    this.isRecentOnly = isRecentOnly ?? false;
  }

  /**
   * Find TMDb ID for a movie by searching title and year
   */
  private async findMovieTmdbId(
    title: string,
    releaseYear: number
  ): Promise<number | null> {
    try {
      // Clean up title (remove year suffix if present)
      const cleanTitle = title.replace(/\s*\(\d{4}\)\s*$/, '').trim();

      this.log(
        `Searching TMDb for movie: "${cleanTitle}" (${releaseYear})`,
        'debug'
      );

      const searchResults = await this.tmdb.searchMovies({
        query: cleanTitle,
        page: 1,
        year: releaseYear,
      });

      if (searchResults.results.length === 0) {
        this.log(
          `No TMDb results found for movie: "${cleanTitle}" (${releaseYear})`,
          'warn'
        );
        return null;
      }

      // Use the first result
      const tmdbId = searchResults.results[0].id;
      this.log(
        `Found TMDb ID ${tmdbId} for movie: "${cleanTitle}" (${releaseYear})`,
        'debug'
      );
      return tmdbId;
    } catch (e) {
      this.log(
        `Error searching TMDb for movie: "${title}" (${releaseYear})`,
        'error',
        { errorMessage: e.message }
      );
      return null;
    }
  }

  /**
   * Find TMDb ID for a TV show by searching name and year
   */
  private async findShowTmdbId(
    name: string,
    releaseYear: number
  ): Promise<number | null> {
    try {
      this.log(`Searching TMDb for show: "${name}" (${releaseYear})`, 'debug');

      const searchResults = await this.tmdb.searchTvShows({
        query: name,
        page: 1,
        firstAirDateYear: releaseYear,
      });

      if (searchResults.results.length === 0) {
        this.log(
          `No TMDb results found for show: "${name}" (${releaseYear})`,
          'warn'
        );
        return null;
      }

      // Use the first result
      const tmdbId = searchResults.results[0].id;
      this.log(
        `Found TMDb ID ${tmdbId} for show: "${name}" (${releaseYear})`,
        'debug'
      );
      return tmdbId;
    } catch (e) {
      this.log(
        `Error searching TMDb for show: "${name}" (${releaseYear})`,
        'error',
        { errorMessage: e.message }
      );
      return null;
    }
  }

  /**
   * Process a Channels DVR movie
   */
  private async processChannelsDVRMovie(movie: ChannelsDVRMovie) {
    try {
      // Find TMDb ID by searching title and year
      const tmdbId = await this.findMovieTmdbId(
        movie.title,
        movie.release_year
      );

      if (!tmdbId) {
        this.log(
          `Skipping movie "${movie.title}" - could not find TMDb ID`,
          'warn'
        );
        return;
      }

      // Channels DVR doesn't provide resolution info in the API
      // We'll default to non-4K for now
      const mediaAddedAt = new Date(movie.created_at);

      await this.processMovie(tmdbId, {
        is4k: false,
        mediaAddedAt,
        ratingKey: movie.id,
        title: movie.title,
        serviceId: this.channelsClient.baseUrl,
        externalServiceId: this.channelsClient.baseUrl,
        externalServiceSlug: 'channelsdvr',
        tmdbId: tmdbId,
        processing: !movie.completed,
      });

      this.log(`Processed movie: ${movie.title} (TMDb ID: ${tmdbId})`, 'info');
    } catch (e) {
      this.log(
        `Error processing Channels DVR movie: ${movie.title}`,
        'error',
        {
          errorMessage: e.message,
          movieId: movie.id,
        }
      );
    }
  }

  /**
   * Process a Channels DVR TV show
   */
  private async processChannelsDVRShow(show: ChannelsDVRShow) {
    try {
      // Find TMDb ID by searching name and year
      const tmdbId = await this.findShowTmdbId(show.name, show.release_year);

      if (!tmdbId) {
        this.log(
          `Skipping show "${show.name}" - could not find TMDb ID`,
          'warn'
        );
        return;
      }

      const mediaAddedAt = new Date(show.created_at);

      // Fetch all episodes for the show from Channels DVR
      const episodes = await this.channelsClient.getShowEpisodes(show.id);

      // Group episodes by season
      const seasonMap = new Map<number, ProcessableSeason>();

      for (const episode of episodes) {
        const seasonNumber = episode.season_number;
        const episodeNumber = episode.episode_number;

        if (!seasonMap.has(seasonNumber)) {
          seasonMap.set(seasonNumber, {
            seasonNumber,
            episodes: [],
          });
        }

        const season = seasonMap.get(seasonNumber)!;
        season.episodes.push({
          episodeNumber,
          ratingKey: episode.id,
          mediaAddedAt: new Date(episode.created_at),
          processing: !episode.completed,
        });
      }

      const seasons = Array.from(seasonMap.values());

      await this.processTvShow(tmdbId, {
        seasons,
        ratingKey: show.id,
        title: show.name,
        serviceId: this.channelsClient.baseUrl,
        externalServiceId: this.channelsClient.baseUrl,
        externalServiceSlug: 'channelsdvr',
      });

      this.log(
        `Processed show: ${show.name} (TMDb ID: ${tmdbId}, ${episodes.length} episodes)`,
        'info'
      );
    } catch (e) {
      this.log(
        `Error processing Channels DVR show: ${show.name}`,
        'error',
        {
          errorMessage: e.message,
          showId: show.id,
        }
      );
    }
  }

  public async run(): Promise<void> {
    const settings = getSettings();
    const sessionManager = settings.main.sessionManager;

    if (!settings.channelsdvr.url) {
      this.log('Channels DVR URL not configured, skipping scan', 'warn');
      return;
    }

    try {
      this.channelsClient = new ChannelsDVRAPI(settings.channelsdvr.url);

      // Test connection
      const connected = await this.channelsClient.testConnection();
      if (!connected) {
        throw new Error('Failed to connect to Channels DVR server');
      }

      this.log('Successfully connected to Channels DVR', 'info');

      // Fetch and process all movies
      this.log('Fetching movies from Channels DVR...', 'info');
      const movies = await this.channelsClient.getMovies();
      this.log(`Found ${movies.length} movies`, 'info');

      for (const movie of movies) {
        await this.processChannelsDVRMovie(movie);
      }

      // Fetch and process all TV shows
      this.log('Fetching TV shows from Channels DVR...', 'info');
      const shows = await this.channelsClient.getShows();
      this.log(`Found ${shows.length} TV shows`, 'info');

      for (const show of shows) {
        await this.processChannelsDVRShow(show);
      }

      this.log('Channels DVR sync completed', 'info');
    } catch (e) {
      this.log('Channels DVR sync failed', 'error', {
        errorMessage: e.message,
      });
      throw e;
    }
  }

  public async cancel(): Promise<void> {
    this.cancelled = true;
  }

  public status(): ChannelsDVRSyncStatus {
    return {
      running: this.running,
      progress: 0,
      total: 0,
      currentLibrary: this.currentLibrary,
      libraries: this.libraries ?? [],
    };
  }
}

export default ChannelsDVRScanner;
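A rough usage sketch of the scanner (illustrative only; in the app this would normally be triggered by the job scheduler rather than called directly):

// Sketch — not part of this PR.
import ChannelsDVRScanner from '@server/lib/scanners/channelsdvr';

const syncChannelsDvr = async () => {
  const scanner = new ChannelsDVRScanner({ isRecentOnly: false });
  // run() tests connectivity, then resolves every movie and show to a TMDb ID
  // via title/year search and feeds them through the base scanner.
  await scanner.run();
  console.log(scanner.status());
};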
@@ -49,6 +49,13 @@ export interface JellyfinSettings {
  serverId: string;
  apiKey: string;
}

export interface ChannelsDVRSettings {
  name: string;
  url: string;
  libraries: Library[];
}

export interface TautulliSettings {
  hostname?: string;
  port?: number;
@@ -355,6 +362,7 @@ export interface AllSettings {
  main: MainSettings;
  plex: PlexSettings;
  jellyfin: JellyfinSettings;
  channelsdvr: ChannelsDVRSettings;
  tautulli: TautulliSettings;
  radarr: RadarrSettings[];
  sonarr: SonarrSettings[];
@@ -423,6 +431,11 @@ class Settings {
      serverId: '',
      apiKey: '',
    },
    channelsdvr: {
      name: 'Channels DVR',
      url: '',
      libraries: [],
    },
    tautulli: {},
    metadataSettings: {
      tv: MetadataProviderType.TMDB,
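With these defaults, the persisted settings gain a channelsdvr block shaped like the interface above; a sketch with example values (only name, url, and libraries exist in the interface, and the URL here is illustrative):

import type { ChannelsDVRSettings } from '@server/lib/settings';

const exampleChannelsDvrSettings: ChannelsDVRSettings = {
  name: 'Channels DVR',
  url: 'http://192.168.1.50:8089', // example address of a Channels DVR server
  libraries: [],
};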
@@ -45,7 +45,7 @@ class WatchlistSync {
        [
          Permission.AUTO_REQUEST,
          Permission.AUTO_REQUEST_MOVIE,
          Permission.AUTO_APPROVE_TV,
          Permission.AUTO_REQUEST_TV,
        ],
        { type: 'or' }
      )
@@ -70,13 +70,33 @@
      response.items.map((i) => i.tmdbId)
    );

    const watchlistTmdbIds = response.items.map((i) => i.tmdbId);

    const requestRepository = getRepository(MediaRequest);
    const existingAutoRequests = await requestRepository
      .createQueryBuilder('request')
      .leftJoinAndSelect('request.media', 'media')
      .where('request.requestedBy = :userId', { userId: user.id })
      .andWhere('request.isAutoRequest = true')
      .andWhere('media.tmdbId IN (:...tmdbIds)', { tmdbIds: watchlistTmdbIds })
      .getMany();

    const autoRequestedTmdbIds = new Set(
      existingAutoRequests
        .filter((r) => r.media != null)
        .map((r) => `${r.media.mediaType}:${r.media.tmdbId}`)
    );

    const unavailableItems = response.items.filter(
      // If we can find watchlist items in our database that are also available, we should exclude them
      (i) =>
        !autoRequestedTmdbIds.has(
          `${i.type === 'show' ? MediaType.TV : MediaType.MOVIE}:${i.tmdbId}`
        ) &&
        !mediaItems.find(
          (m) =>
            m.tmdbId === i.tmdbId &&
            ((m.status !== MediaStatus.UNKNOWN && m.mediaType === 'movie') ||
            (m.status === MediaStatus.BLOCKLISTED ||
              (m.status !== MediaStatus.UNKNOWN && m.mediaType === 'movie') ||
              (m.mediaType === 'tv' && m.status === MediaStatus.AVAILABLE))
        )
    );
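The new filter above keys existing auto-requests by a mediaType:tmdbId composite so that a movie and a TV show sharing a TMDb ID are not conflated. A stripped-down sketch of that dedupe idea (simplified types, not the actual WatchlistSync code):

type Keyed = { mediaType: 'movie' | 'tv'; tmdbId: number };

const excludeAlreadyRequested = (incoming: Keyed[], existing: Keyed[]): Keyed[] => {
  const seen = new Set(existing.map((m) => `${m.mediaType}:${m.tmdbId}`));
  return incoming.filter((i) => !seen.has(`${i.mediaType}:${i.tmdbId}`));
};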
153
server/migration/postgres/1771259406751-AddForeignKeyIndexes.ts
Normal file
@@ -0,0 +1,153 @@
import type { MigrationInterface, QueryRunner } from 'typeorm';

export class AddForeignKeyIndexes1771259406751 implements MigrationInterface {
  name = 'AddForeignKeyIndexes1771259406751';

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "blocklist" DROP CONSTRAINT "FK_53c1ab62c3e5875bc3ac474823e"`
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" DROP CONSTRAINT "FK_62b7ade94540f9f8d8bede54b99"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_6bbafa28411e6046421991ea21"`
    );
    await queryRunner.query(
      `CREATE SEQUENCE IF NOT EXISTS "blocklist_id_seq" OWNED BY "blocklist"."id"`
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" ALTER COLUMN "id" SET DEFAULT nextval('"blocklist_id_seq"')`
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" ALTER COLUMN "id" DROP DEFAULT`
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_ae34e6b153a90672eb9dc4857d" ON "watchlist" ("requestedById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_6641da8d831b93dfcb429f8b8b" ON "watchlist" ("mediaId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_707b033c2d0653f75213614789" ON "issue_comment" ("userId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_180710fead1c94ca499c57a7d4" ON "issue_comment" ("issueId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_53d04c07c3f4f54eae372ed665" ON "issue" ("issueType") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_276e20d053f3cff1645803c95d" ON "issue" ("mediaId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_10b17b49d1ee77e7184216001e" ON "issue" ("createdById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_da88a1019c850d1a7b143ca02e" ON "issue" ("modifiedById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_6f14737e346d6b27d8e50d2157" ON "season_request" ("requestId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_a1aa713f41c99e9d10c48da75a" ON "media_request" ("mediaId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_6997bee94720f1ecb7f3113709" ON "media_request" ("requestedById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_f4fc4efa14c3ba2b29c4525fa1" ON "media_request" ("modifiedById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_03f7958328e311761b0de675fb" ON "user_push_subscription" ("userId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_09b94c932e84635c5461f3c0a9" ON "blocklist" ("tmdbId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_356721a49f145aa439c16e6b99" ON "blocklist" ("userId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_087099b39600be695591da9a49" ON "season" ("mediaId") `
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" ADD CONSTRAINT "FK_356721a49f145aa439c16e6b999" FOREIGN KEY ("userId") REFERENCES "user"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" ADD CONSTRAINT "FK_5c8af2d0e83b3be6d250eccc19d" FOREIGN KEY ("mediaId") REFERENCES "media"("id") ON DELETE CASCADE ON UPDATE NO ACTION`
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "blocklist" DROP CONSTRAINT "FK_5c8af2d0e83b3be6d250eccc19d"`
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" DROP CONSTRAINT "FK_356721a49f145aa439c16e6b999"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_087099b39600be695591da9a49"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_356721a49f145aa439c16e6b99"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_09b94c932e84635c5461f3c0a9"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_03f7958328e311761b0de675fb"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_f4fc4efa14c3ba2b29c4525fa1"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_6997bee94720f1ecb7f3113709"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_a1aa713f41c99e9d10c48da75a"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_6f14737e346d6b27d8e50d2157"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_da88a1019c850d1a7b143ca02e"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_10b17b49d1ee77e7184216001e"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_276e20d053f3cff1645803c95d"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_53d04c07c3f4f54eae372ed665"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_180710fead1c94ca499c57a7d4"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_707b033c2d0653f75213614789"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_6641da8d831b93dfcb429f8b8b"`
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_ae34e6b153a90672eb9dc4857d"`
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" ALTER COLUMN "id" SET DEFAULT nextval('blacklist_id_seq')`
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" ALTER COLUMN "id" DROP DEFAULT`
    );
    await queryRunner.query(`DROP SEQUENCE "blocklist_id_seq"`);
    await queryRunner.query(
      `CREATE INDEX "IDX_6bbafa28411e6046421991ea21" ON "blocklist" ("tmdbId") `
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" ADD CONSTRAINT "FK_62b7ade94540f9f8d8bede54b99" FOREIGN KEY ("mediaId") REFERENCES "media"("id") ON DELETE CASCADE ON UPDATE NO ACTION`
    );
    await queryRunner.query(
      `ALTER TABLE "blocklist" ADD CONSTRAINT "FK_53c1ab62c3e5875bc3ac474823e" FOREIGN KEY ("userId") REFERENCES "user"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`
    );
  }
}
203
server/migration/sqlite/1771259394105-AddForeignKeyIndexes.ts
Normal file
@@ -0,0 +1,203 @@
import type { MigrationInterface, QueryRunner } from 'typeorm';

export class AddForeignKeyIndexes1771259394105 implements MigrationInterface {
  name = 'AddForeignKeyIndexes1771259394105';

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP INDEX "IDX_6bbafa28411e6046421991ea21"`);
    await queryRunner.query(
      `CREATE TABLE "temporary_blocklist" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "mediaType" varchar NOT NULL, "title" varchar, "tmdbId" integer NOT NULL, "blocklistedTags" varchar, "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "userId" integer, "mediaId" integer, CONSTRAINT "REL_62b7ade94540f9f8d8bede54b9" UNIQUE ("mediaId"), CONSTRAINT "UQ_6bbafa28411e6046421991ea21c" UNIQUE ("tmdbId"))`
    );
    await queryRunner.query(
      `INSERT INTO "temporary_blocklist"("id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId") SELECT "id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId" FROM "blocklist"`
    );
    await queryRunner.query(`DROP TABLE "blocklist"`);
    await queryRunner.query(
      `ALTER TABLE "temporary_blocklist" RENAME TO "blocklist"`
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_6bbafa28411e6046421991ea21" ON "blocklist" ("tmdbId") `
    );
    await queryRunner.query(`DROP INDEX "IDX_6bbafa28411e6046421991ea21"`);
    await queryRunner.query(
      `CREATE TABLE "temporary_user_push_subscription" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "endpoint" varchar NOT NULL, "p256dh" varchar NOT NULL, "auth" varchar NOT NULL, "userId" integer, "userAgent" varchar, "createdAt" datetime DEFAULT (CURRENT_TIMESTAMP), CONSTRAINT "UQ_6427d07d9a171a3a1ab87480005" UNIQUE ("endpoint", "userId"), CONSTRAINT "UQ_f90ab5a4ed54905a4bb51a7148b" UNIQUE ("auth"), CONSTRAINT "FK_03f7958328e311761b0de675fbe" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION)`
    );
    await queryRunner.query(
      `INSERT INTO "temporary_user_push_subscription"("id", "endpoint", "p256dh", "auth", "userId", "userAgent", "createdAt") SELECT "id", "endpoint", "p256dh", "auth", "userId", "userAgent", "createdAt" FROM "user_push_subscription"`
    );
    await queryRunner.query(`DROP TABLE "user_push_subscription"`);
    await queryRunner.query(
      `ALTER TABLE "temporary_user_push_subscription" RENAME TO "user_push_subscription"`
    );
    await queryRunner.query(
      `CREATE TABLE "temporary_user_push_subscription" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "endpoint" varchar NOT NULL, "p256dh" varchar NOT NULL, "auth" varchar NOT NULL, "userId" integer, "userAgent" varchar, "createdAt" datetime DEFAULT (CURRENT_TIMESTAMP), CONSTRAINT "UQ_6427d07d9a171a3a1ab87480005" UNIQUE ("endpoint", "userId"), CONSTRAINT "UQ_f90ab5a4ed54905a4bb51a7148b" UNIQUE ("auth"), CONSTRAINT "FK_03f7958328e311761b0de675fbe" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION)`
    );
    await queryRunner.query(
      `INSERT INTO "temporary_user_push_subscription"("id", "endpoint", "p256dh", "auth", "userId", "userAgent", "createdAt") SELECT "id", "endpoint", "p256dh", "auth", "userId", "userAgent", "createdAt" FROM "user_push_subscription"`
    );
    await queryRunner.query(`DROP TABLE "user_push_subscription"`);
    await queryRunner.query(
      `ALTER TABLE "temporary_user_push_subscription" RENAME TO "user_push_subscription"`
    );
    await queryRunner.query(
      `CREATE TABLE "temporary_blocklist" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "mediaType" varchar NOT NULL, "title" varchar, "tmdbId" integer NOT NULL, "blocklistedTags" varchar, "createdAt" datetime NOT NULL DEFAULT (CURRENT_TIMESTAMP), "userId" integer, "mediaId" integer, CONSTRAINT "REL_62b7ade94540f9f8d8bede54b9" UNIQUE ("mediaId"), CONSTRAINT "UQ_6bbafa28411e6046421991ea21c" UNIQUE ("tmdbId"))`
    );
    await queryRunner.query(
      `INSERT INTO "temporary_blocklist"("id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId") SELECT "id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId" FROM "blocklist"`
    );
    await queryRunner.query(`DROP TABLE "blocklist"`);
    await queryRunner.query(
      `ALTER TABLE "temporary_blocklist" RENAME TO "blocklist"`
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_ae34e6b153a90672eb9dc4857d" ON "watchlist" ("requestedById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_6641da8d831b93dfcb429f8b8b" ON "watchlist" ("mediaId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_707b033c2d0653f75213614789" ON "issue_comment" ("userId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_180710fead1c94ca499c57a7d4" ON "issue_comment" ("issueId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_53d04c07c3f4f54eae372ed665" ON "issue" ("issueType") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_276e20d053f3cff1645803c95d" ON "issue" ("mediaId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_10b17b49d1ee77e7184216001e" ON "issue" ("createdById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_da88a1019c850d1a7b143ca02e" ON "issue" ("modifiedById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_6f14737e346d6b27d8e50d2157" ON "season_request" ("requestId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_a1aa713f41c99e9d10c48da75a" ON "media_request" ("mediaId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_6997bee94720f1ecb7f3113709" ON "media_request" ("requestedById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_f4fc4efa14c3ba2b29c4525fa1" ON "media_request" ("modifiedById") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_03f7958328e311761b0de675fb" ON "user_push_subscription" ("userId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_09b94c932e84635c5461f3c0a9" ON "blocklist" ("tmdbId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_356721a49f145aa439c16e6b99" ON "blocklist" ("userId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_087099b39600be695591da9a49" ON "season" ("mediaId") `
    );
    await queryRunner.query(`DROP INDEX "IDX_09b94c932e84635c5461f3c0a9"`);
    await queryRunner.query(`DROP INDEX "IDX_356721a49f145aa439c16e6b99"`);
    await queryRunner.query(
      `CREATE TABLE "temporary_blocklist" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "mediaType" varchar NOT NULL, "title" varchar, "tmdbId" integer NOT NULL, "blocklistedTags" varchar, "createdAt" datetime NOT NULL DEFAULT (CURRENT_TIMESTAMP), "userId" integer, "mediaId" integer, CONSTRAINT "REL_62b7ade94540f9f8d8bede54b9" UNIQUE ("mediaId"), CONSTRAINT "UQ_6bbafa28411e6046421991ea21c" UNIQUE ("tmdbId"), CONSTRAINT "FK_356721a49f145aa439c16e6b999" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION, CONSTRAINT "FK_5c8af2d0e83b3be6d250eccc19d" FOREIGN KEY ("mediaId") REFERENCES "media" ("id") ON DELETE CASCADE ON UPDATE NO ACTION)`
    );
    await queryRunner.query(
      `INSERT INTO "temporary_blocklist"("id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId") SELECT "id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId" FROM "blocklist"`
    );
    await queryRunner.query(`DROP TABLE "blocklist"`);
    await queryRunner.query(
      `ALTER TABLE "temporary_blocklist" RENAME TO "blocklist"`
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_09b94c932e84635c5461f3c0a9" ON "blocklist" ("tmdbId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_356721a49f145aa439c16e6b99" ON "blocklist" ("userId") `
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP INDEX "IDX_356721a49f145aa439c16e6b99"`);
    await queryRunner.query(`DROP INDEX "IDX_09b94c932e84635c5461f3c0a9"`);
    await queryRunner.query(
      `ALTER TABLE "blocklist" RENAME TO "temporary_blocklist"`
    );
    await queryRunner.query(
      `CREATE TABLE "blocklist" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "mediaType" varchar NOT NULL, "title" varchar, "tmdbId" integer NOT NULL, "blocklistedTags" varchar, "createdAt" datetime NOT NULL DEFAULT (CURRENT_TIMESTAMP), "userId" integer, "mediaId" integer, CONSTRAINT "REL_62b7ade94540f9f8d8bede54b9" UNIQUE ("mediaId"), CONSTRAINT "UQ_6bbafa28411e6046421991ea21c" UNIQUE ("tmdbId"))`
    );
    await queryRunner.query(
      `INSERT INTO "blocklist"("id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId") SELECT "id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId" FROM "temporary_blocklist"`
    );
    await queryRunner.query(`DROP TABLE "temporary_blocklist"`);
    await queryRunner.query(
      `CREATE INDEX "IDX_356721a49f145aa439c16e6b99" ON "blocklist" ("userId") `
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_09b94c932e84635c5461f3c0a9" ON "blocklist" ("tmdbId") `
    );
    await queryRunner.query(`DROP INDEX "IDX_087099b39600be695591da9a49"`);
    await queryRunner.query(`DROP INDEX "IDX_356721a49f145aa439c16e6b99"`);
    await queryRunner.query(`DROP INDEX "IDX_09b94c932e84635c5461f3c0a9"`);
    await queryRunner.query(`DROP INDEX "IDX_03f7958328e311761b0de675fb"`);
    await queryRunner.query(`DROP INDEX "IDX_f4fc4efa14c3ba2b29c4525fa1"`);
    await queryRunner.query(`DROP INDEX "IDX_6997bee94720f1ecb7f3113709"`);
    await queryRunner.query(`DROP INDEX "IDX_a1aa713f41c99e9d10c48da75a"`);
    await queryRunner.query(`DROP INDEX "IDX_6f14737e346d6b27d8e50d2157"`);
    await queryRunner.query(`DROP INDEX "IDX_da88a1019c850d1a7b143ca02e"`);
    await queryRunner.query(`DROP INDEX "IDX_10b17b49d1ee77e7184216001e"`);
    await queryRunner.query(`DROP INDEX "IDX_276e20d053f3cff1645803c95d"`);
    await queryRunner.query(`DROP INDEX "IDX_53d04c07c3f4f54eae372ed665"`);
    await queryRunner.query(`DROP INDEX "IDX_180710fead1c94ca499c57a7d4"`);
    await queryRunner.query(`DROP INDEX "IDX_707b033c2d0653f75213614789"`);
    await queryRunner.query(`DROP INDEX "IDX_6641da8d831b93dfcb429f8b8b"`);
    await queryRunner.query(`DROP INDEX "IDX_ae34e6b153a90672eb9dc4857d"`);
    await queryRunner.query(
      `ALTER TABLE "blocklist" RENAME TO "temporary_blocklist"`
    );
    await queryRunner.query(
      `CREATE TABLE "blocklist" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "mediaType" varchar NOT NULL, "title" varchar, "tmdbId" integer NOT NULL, "blocklistedTags" varchar, "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "userId" integer, "mediaId" integer, CONSTRAINT "REL_62b7ade94540f9f8d8bede54b9" UNIQUE ("mediaId"), CONSTRAINT "UQ_6bbafa28411e6046421991ea21c" UNIQUE ("tmdbId"))`
    );
    await queryRunner.query(
      `INSERT INTO "blocklist"("id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId") SELECT "id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId" FROM "temporary_blocklist"`
    );
    await queryRunner.query(`DROP TABLE "temporary_blocklist"`);
    await queryRunner.query(
      `ALTER TABLE "user_push_subscription" RENAME TO "temporary_user_push_subscription"`
    );
    await queryRunner.query(
      `CREATE TABLE "user_push_subscription" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "endpoint" varchar NOT NULL, "p256dh" varchar NOT NULL, "auth" varchar NOT NULL, "userId" integer, "userAgent" varchar, "createdAt" datetime DEFAULT (CURRENT_TIMESTAMP), CONSTRAINT "UQ_6427d07d9a171a3a1ab87480005" UNIQUE ("endpoint", "userId"), CONSTRAINT "UQ_f90ab5a4ed54905a4bb51a7148b" UNIQUE ("auth"), CONSTRAINT "FK_03f7958328e311761b0de675fbe" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION)`
    );
    await queryRunner.query(
      `INSERT INTO "user_push_subscription"("id", "endpoint", "p256dh", "auth", "userId", "userAgent", "createdAt") SELECT "id", "endpoint", "p256dh", "auth", "userId", "userAgent", "createdAt" FROM "temporary_user_push_subscription"`
    );
    await queryRunner.query(`DROP TABLE "temporary_user_push_subscription"`);
    await queryRunner.query(
      `ALTER TABLE "user_push_subscription" RENAME TO "temporary_user_push_subscription"`
    );
    await queryRunner.query(
      `CREATE TABLE "user_push_subscription" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "endpoint" varchar NOT NULL, "p256dh" varchar NOT NULL, "auth" varchar NOT NULL, "userId" integer, "userAgent" varchar, "createdAt" datetime DEFAULT (CURRENT_TIMESTAMP), CONSTRAINT "UQ_6427d07d9a171a3a1ab87480005" UNIQUE ("endpoint", "userId"), CONSTRAINT "UQ_f90ab5a4ed54905a4bb51a7148b" UNIQUE ("auth"), CONSTRAINT "FK_03f7958328e311761b0de675fbe" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION)`
    );
    await queryRunner.query(
      `INSERT INTO "user_push_subscription"("id", "endpoint", "p256dh", "auth", "userId", "userAgent", "createdAt") SELECT "id", "endpoint", "p256dh", "auth", "userId", "userAgent", "createdAt" FROM "temporary_user_push_subscription"`
    );
    await queryRunner.query(`DROP TABLE "temporary_user_push_subscription"`);
    await queryRunner.query(
      `CREATE INDEX "IDX_6bbafa28411e6046421991ea21" ON "blocklist" ("tmdbId") `
    );
    await queryRunner.query(`DROP INDEX "IDX_6bbafa28411e6046421991ea21"`);
    await queryRunner.query(
      `ALTER TABLE "blocklist" RENAME TO "temporary_blocklist"`
    );
    await queryRunner.query(
      `CREATE TABLE "blocklist" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "mediaType" varchar NOT NULL, "title" varchar, "tmdbId" integer NOT NULL, "blocklistedTags" varchar, "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "userId" integer, "mediaId" integer, CONSTRAINT "REL_62b7ade94540f9f8d8bede54b9" UNIQUE ("mediaId"), CONSTRAINT "UQ_6bbafa28411e6046421991ea21c" UNIQUE ("tmdbId"), CONSTRAINT "FK_62b7ade94540f9f8d8bede54b99" FOREIGN KEY ("mediaId") REFERENCES "media" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_53c1ab62c3e5875bc3ac474823e" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`
    );
    await queryRunner.query(
      `INSERT INTO "blocklist"("id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId") SELECT "id", "mediaType", "title", "tmdbId", "blocklistedTags", "createdAt", "userId", "mediaId" FROM "temporary_blocklist"`
    );
    await queryRunner.query(`DROP TABLE "temporary_blocklist"`);
    await queryRunner.query(
      `CREATE INDEX "IDX_6bbafa28411e6046421991ea21" ON "blocklist" ("tmdbId") `
    );
  }
}
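The SQLite migration is much longer than the Postgres one because SQLite's ALTER TABLE cannot add foreign-key constraints to an existing table, so the generated migration rebuilds each affected table with the temporary-table copy/drop/rename pattern. Stripped to its core (placeholder table and column names, meant to live inside a migration's up()):

// Rebuild pattern used repeatedly above — illustrative names only.
await queryRunner.query(
  `CREATE TABLE "temporary_example" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "userId" integer, CONSTRAINT "FK_example_user" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE CASCADE)`
);
await queryRunner.query(
  `INSERT INTO "temporary_example"("id", "userId") SELECT "id", "userId" FROM "example"`
);
await queryRunner.query(`DROP TABLE "example"`);
await queryRunner.query(`ALTER TABLE "temporary_example" RENAME TO "example"`);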
@@ -174,7 +174,12 @@ mediaRoutes.delete(
        where: { id: Number(req.params.id) },
      });

      await mediaRepository.remove(media);
      if (media.status === MediaStatus.BLOCKLISTED) {
        media.resetServiceData();
        await mediaRepository.save(media);
      } else {
        await mediaRepository.remove(media);
      }

      return res.status(204).send();
    } catch (e) {