Compare commits


1 Commit

Author SHA1 Message Date
gauthier-th 637712e4fc feat: add script for SQLite to PostgreSQL migration 2026-02-18 14:50:40 +01:00
33 changed files with 220 additions and 483 deletions

View File

@@ -1,7 +1,6 @@
name: 🐛 Bug Report
description: Report a problem
labels: ['awaiting triage']
type: bug
labels: ['bug', 'awaiting triage']
body:
- type: markdown
attributes:

View File

@@ -1,63 +0,0 @@
name: 📚 Documentation
description: Report a docs problem or suggest a docs improvement
title: "[Docs]: "
labels: ["documentation", "awaiting triage"]
type: task
body:
- type: markdown
attributes:
value: |
Thanks for helping improve the docs!
Use this template for documentation issues (typos, unclear steps, missing info, outdated screenshots).
For app bugs or feature ideas, please use the other templates.
- type: input
id: doc-location
attributes:
label: Page / Location
description: Link to the docs page or the file/path (e.g. https://docs.seerr.dev/... or README.md)
placeholder: "https://docs.seerr.dev/..."
validations:
required: true
- type: dropdown
id: doc-area
attributes:
label: Docs Area
options:
- docs site
- migration guide
- README / repo docs
- API / integrations
- other
validations:
required: true
- type: textarea
id: problem
attributes:
label: What's wrong / missing?
description: Describe the issue in the docs.
validations:
required: true
- type: textarea
id: suggested-fix
attributes:
label: Suggested change
description: If you know what should be changed, describe it (or paste proposed wording).
validations:
required: false
- type: checkboxes
id: search-existing
attributes:
label: Search Existing Issues
description: Have you searched existing issues to see if this has already been reported?
options:
- label: Yes, I have searched existing issues.
required: true
- type: checkboxes
id: terms
attributes:
label: Code of Conduct
description: By submitting this issue, you agree to follow our Code of Conduct.
options:
- label: I agree to follow Seerr's [Code of Conduct](https://github.com/seerr-team/seerr/blob/develop/CODE_OF_CONDUCT.md).
required: true

View File

@@ -1,7 +1,6 @@
name: ✨ Feature Request
description: Suggest an idea
labels: ['awaiting triage']
type: feature
labels: ['enhancement', 'awaiting triage']
body:
- type: markdown
attributes:

View File

@@ -1,65 +0,0 @@
name: 🧰 Maintenance / Chore
description: CI, GitHub Actions, build, dependencies, refactors (non-feature work)
title: "[Chore]: "
labels: ["maintenance", "awaiting triage"]
type: task
body:
- type: markdown
attributes:
value: |
Maintainers / contributors: use this for internal tasks (CI, workflows, tooling, refactors).
If you're reporting a user-facing bug or requesting a feature, use the other templates.
- type: dropdown
id: area
attributes:
label: Area
options:
- CI / GitHub Actions
- build / packaging
- dependencies
- release process
- refactor / tech debt
- tooling / scripts
- other
validations:
required: true
- type: textarea
id: summary
attributes:
label: Summary
description: What needs doing and why?
validations:
required: true
- type: textarea
id: acceptance
attributes:
label: Acceptance criteria
description: What does "done" look like?
placeholder: |
- [ ] ...
- [ ] ...
validations:
required: false
- type: input
id: related
attributes:
label: Related links
description: PRs, failing workflow runs, logs, or relevant issues.
validations:
required: false
- type: checkboxes
id: search-existing
attributes:
label: Search Existing Issues
description: Have you searched existing issues to see if this has already been reported?
options:
- label: Yes, I have searched existing issues.
required: true
- type: checkboxes
id: terms
attributes:
label: Code of Conduct
description: By submitting this issue, you agree to follow our Code of Conduct.
options:
- label: I agree to follow Seerr's [Code of Conduct](https://github.com/seerr-team/seerr/blob/develop/CODE_OF_CONDUCT.md).
required: true

View File

@@ -18,7 +18,7 @@ env:
DOCKER_HUB: seerr/seerr
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
group: ci-${{ github.ref }}
cancel-in-progress: true
jobs:
@@ -129,7 +129,7 @@ jobs:
build:
name: Build (per-arch, native runners)
if: github.ref == 'refs/heads/develop'
if: github.ref == 'refs/heads/develop' && !contains(github.event.head_commit.message, '[skip ci]')
strategy:
matrix:
include:
@@ -237,7 +237,7 @@ jobs:
discord:
name: Send Discord Notification
needs: publish
if: always() && github.event_name != 'pull_request'
if: always() && github.event_name != 'pull_request' && !contains(github.event.head_commit.message, '[skip ci]')
runs-on: ubuntu-24.04
steps:
- name: Determine Workflow Status

View File

@@ -20,7 +20,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
group: codeql-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -14,7 +14,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
group: merge-conflict-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -1,87 +0,0 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: Create tag
on:
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
determine-tag-version:
name: Determine tag version
if: github.ref == 'refs/heads/main'
runs-on: ubuntu-24.04
permissions:
contents: read
outputs:
tag_version: ${{ steps.git-cliff.outputs.tag_version }}
steps:
- name: Checkout
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
fetch-depth: 0
persist-credentials: false
- name: Install git-cliff
uses: taiki-e/install-action@cede0bb282aae847dfa8aacca3a41c86d973d4d7 # v2.68.1
with:
tool: git-cliff
- name: Get tag version
id: git-cliff
run: |
tag_version=$(git-cliff -c .github/cliff.toml --bumped-version --unreleased)
echo "Next tag version is ${tag_version}"
echo "tag_version=${tag_version}" >> "$GITHUB_OUTPUT"
create-tag:
name: Create tag
if: github.ref == 'refs/heads/main'
runs-on: ubuntu-24.04
permissions:
contents: write
needs: determine-tag-version
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAG_VERSION: ${{ needs.determine-tag-version.outputs.tag_version }}
steps:
- name: Checkout
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
ssh-key: '${{ secrets.COMMIT_KEY }}'
- name: Pnpm Setup
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Set up Node.js
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: 'package.json'
# For workflows with elevated privileges we recommend disabling automatic caching.
# https://github.com/actions/setup-node
package-manager-cache: false
- name: Configure git
run: |
git config --global user.name "${{ github.actor }}"
git config --global user.email "${{ github.actor }}@users.noreply.github.com"
- name: Bump package.json
run: npm version ${TAG_VERSION} --no-commit-hooks --no-git-tag-version
- name: Commit updated files
run: |
git add package.json
git commit -m 'chore(release): prepare ${TAG_VERSION}'
git push
- name: Create git tag
run: |
git tag ${TAG_VERSION}
git push origin ${TAG_VERSION}

View File

@@ -28,7 +28,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
group: cypress-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -15,7 +15,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
group: pages
cancel-in-progress: true
jobs:

View File

@@ -25,7 +25,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
group: docs-link-check-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -14,7 +14,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
group: helm-charts
cancel-in-progress: true
jobs:

View File

@@ -18,7 +18,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
group: charts-lint-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -15,7 +15,7 @@ env:
DOCKER_HUB: seerr/seerr
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
group: preview-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -11,7 +11,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
group: release-${{ github.ref }}
cancel-in-progress: true
env:

View File

@@ -12,7 +12,7 @@ on:
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
group: renovate-helm-hooks-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -1,28 +0,0 @@
name: "Semantic PR"
on:
pull_request_target:
types:
- opened
- reopened
- edited
- synchronize
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
main:
name: Validate PR Title
runs-on: ubuntu-slim
permissions:
contents: read
pull-requests: read
checks: write
steps:
- uses: amannn/action-semantic-pull-request@48f256284bd46cdaab1048c3721360e808335d50 # v6.1.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -9,7 +9,7 @@ on:
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
group: close-stale-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -14,7 +14,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
group: docs-pr-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -16,7 +16,7 @@ permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
group: trivy-scan-${{ github.ref }}
cancel-in-progress: true
jobs:

View File

@@ -122,7 +122,7 @@ Steps:
- If you are taking on an existing bug or feature ticket, please comment on the [issue](/../../issues) to avoid multiple people working on the same thing.
- All commits **must** follow [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/)
- Pull requests with titles or commits not following this standard will **not** be merged. PR titles are automatically checked for compliance.
- Pull requests with commits not following this standard will **not** be merged.
- Please make meaningful commits, or squash them prior to opening a pull request.
- Do not squash commits once people have begun reviewing your changes.
- Always rebase your commit to the latest `develop` branch. Do **not** merge `develop` into your branch.
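
For example, the commit in this compare view already follows the convention. An illustrative commit command (an editor's example, not part of the contributing guide itself) that would pass the automated title/commit check:

```bash
# Conventional Commits format: <type>(<optional scope>): <description>
git commit -m "feat: add script for SQLite to PostgreSQL migration"
```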

View File

@@ -30,7 +30,7 @@ If your PostgreSQL server is configured to accept TCP connections, you can speci
```dotenv
DB_TYPE=postgres # Which DB engine to use, either sqlite or postgres. The default is sqlite.
DB_HOST=localhost # (optional) The host (URL) of the database. The default is "localhost".
DB_HOST="localhost" # (optional) The host (URL) of the database. The default is "localhost".
DB_PORT="5432" # (optional) The port to connect to. The default is "5432".
DB_USER= # (required) Username used to connect to the database.
DB_PASS= # (required) Password of the user used to connect to the database.

View File

@@ -1,111 +0,0 @@
---
title: Synology (Advanced)
description: Install Seerr on Synology NAS using SynoCommunity
sidebar_position: 5
---
# Synology
:::warning
Third-party installation methods are maintained by the community. The Seerr team is not responsible for these packages.
:::
:::warning
This method is not recommended for most users. It is intended for advanced users who are using Synology NAS.
:::
## Prerequisites
- Synology NAS running **DSM 7.2** or later
- 64-bit architecture (x86_64 or ARMv8)
- [SynoCommunity package source](https://synocommunity.com/) added to Package Center
## Adding the SynoCommunity Package Source
If you haven't already added SynoCommunity to your Package Center:
1. Open **Package Center** in DSM
2. Click **Settings** in the top-right corner
3. Go to the **Package Sources** tab
4. Click **Add**
5. Enter the following:
- **Name**: `SynoCommunity`
- **Location**: `https://packages.synocommunity.com`
6. Click **OK**
## Installation
1. In **Package Center**, search for **Seerr**
2. Click **Install**
3. Follow the installation wizard prompts
4. Package Center will automatically install any required dependencies (Node.js v22)
### Access Seerr
Once installed, access Seerr at:
```
http://<your-synology-ip>:5055
```
You can also click the **Open** button in Package Center or find Seerr in the DSM main menu.
## Configuration
Seerr's configuration files are stored at:
```
/var/packages/seerr/var/config
```
:::info
The Seerr package runs as a dedicated service user managed by DSM. No manual permission configuration is required.
:::
## Managing the Service
You can start, stop, and restart Seerr from **Package Center** → Find Seerr → Use the action buttons.
## Updating
When a new version is available:
1. Open **Package Center**
2. Go to **Installed** packages
3. Find **Seerr** and click **Update** if available
:::tip
Enable automatic updates in Package Center settings to keep Seerr up to date.
:::
## Troubleshooting
### Viewing Logs
Seerr logs are located at `/var/packages/seerr/var/config/logs` and can be accessed using:
- **File Browser** package (recommended for most users)
- SSH (advanced users)
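For example, over SSH the log directory noted above can be listed directly (an illustrative command; log file names will vary):
```bash
ls /var/packages/seerr/var/config/logs
```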
### Port Conflicts
Seerr uses port 5055. If this port is already in use:
- **Docker containers**: Remap the conflicting container to a different port
- **Other packages**: The conflicting package will need to be uninstalled as Seerr's port cannot be changed
SynoCommunity ensures there are no port conflicts with other SynoCommunity packages or official Synology packages.
### Package Won't Start
Ensure Node.js v22 is installed and running by checking its status in **Package Center**.
## Uninstallation
1. Open **Package Center**
2. Find **Seerr** in your installed packages
3. Click **Uninstall**
:::caution
Uninstalling will remove the application but preserve your configuration data by default. Select "Remove data" during uninstallation if you want a complete removal.
:::

View File

@@ -4,6 +4,12 @@ description: Install Seerr using TrueNAS
sidebar_position: 4
---
# TrueNAS
:::danger
This method has not yet been updated for Seerr and is currently a work in progress.
You can follow the ongoing work in this issue: https://github.com/truenas/apps/issues/3374.
:::
<!--
:::warning
Third-party installation methods are maintained by the community. The Seerr team is not responsible for these packages.
:::
@@ -11,7 +17,4 @@ Third-party installation methods are maintained by the community. The Seerr team
:::warning
This method is not recommended for most users. It is intended for advanced users who are using TrueNAS distribution.
:::
## Installation
Go to the 'Apps' menu, click the 'Discover Apps' button in the top right, search for 'Seerr' in the search bar, and install the app.
-->

View File

@@ -210,42 +210,7 @@ See https://aur.archlinux.org/packages/seerr
### TrueNAS
Refer to the [Seerr TrueNAS Documentation](/getting-started/third-parties/truenas); all of our examples have been updated to reflect the change below.
<Tabs groupId="truenas-migration" queryString>
<TabItem value="hostpath" label="Host Path">
**This guide describes how to migrate from Host Path storage (not ixVolume).**
1. Stop Jellyseerr/Overseerr
2. Install Seerr and use the same Host Path storage that was used by Jellyseerr/Overseerr
3. Start Seerr app
4. Delete Jellyseerr/Overseerr app
</TabItem>
<TabItem value="ixvolume" label="ixVolume">
**This guide describes how to migrate from ixVolume storage (not Host Path).**
1. Stop Jellyseerr/Overseerr
2. Create a dataset for Seerr
If your apps normally store data under something like:
```
/mnt/storage/<app-name>
```
then create a dataset named:
```
storage/seerr
```
resulting in:
```
/mnt/storage/seerr
```
3. Copy ixVolume Data
Open System Settings → Shell, or SSH into your TrueNAS server as root and run:
```bash
rsync -av /mnt/.ix-apps/app_mounts/jellyseerr/ /mnt/storage/seerr/
```
4. Install Seerr and use the same Host Path storage that was created before (`/mnt/storage/seerr/config` in our example)
5. Start Seerr app
6. Delete Jellyseerr/Overseerr app
</TabItem>
</Tabs>
Waiting for https://github.com/truenas/apps/issues/3374
### Unraid
@@ -312,4 +277,4 @@ For Jellyseerr users, use `/mnt/user/appdata/jellyseerr`.
:::tip
If you are using a reverse proxy (such as SWAG or Nginx Proxy Manager), update your proxy configuration to point to the new container name `seerr`. The default port remains `5055`.
:::
:::

View File

@@ -19,7 +19,7 @@ Please check how to migrate to Seerr in our [migration guide](https://docs.seerr
Seerr brings several features that were previously available in Jellyseerr but missing from Overseerr. These additions improve flexibility, performance, and overall control for admins and power users:
* **Alternative media solution:** Added support for Jellyfin and Emby as alternatives to Plex. Only one integration can be used at a time.
* **Alternative media solution:** Added support for Jellyfin and Emby in addition to the existing Plex integration.
* **PostgreSQL support**: In addition to SQLite, you can now opt in to using a PostgreSQL database.
* **Blocklist for movies, series, and tags**: Allows permitted users to hide movies, series, or tags from regular users.
* **Override rules**: Adjust default request settings based on conditions such as user, tag, or other criteria.

View File

@@ -16,12 +16,7 @@ const config: Config = {
deploymentBranch: 'gh-pages',
onBrokenLinks: 'throw',
markdown: {
hooks: {
onBrokenMarkdownLinks: 'warn',
},
},
onBrokenMarkdownLinks: 'warn',
i18n: {
defaultLocale: 'en',

View File

@@ -24,7 +24,8 @@
"prepare": "node bin/prepare.js",
"cypress:open": "cypress open",
"cypress:prepare": "ts-node -r tsconfig-paths/register --files --project server/tsconfig.json server/scripts/prepareTestDb.ts",
"cypress:build": "pnpm build && pnpm cypress:prepare"
"cypress:build": "pnpm build && pnpm cypress:prepare",
"db:migratetopostgres": "pnpm build:server && node dist/scripts/sqliteToPostgres.js"
},
"repository": {
"type": "git",

View File

@@ -206,19 +206,6 @@ class Media {
Object.assign(this, init);
}
public resetServiceData(): void {
this.serviceId = null;
this.serviceId4k = null;
this.externalServiceId = null;
this.externalServiceId4k = null;
this.externalServiceSlug = null;
this.externalServiceSlug4k = null;
this.ratingKey = null;
this.ratingKey4k = null;
this.jellyfinMediaId = null;
this.jellyfinMediaId4k = null;
}
@AfterLoad()
public setPlexUrls(): void {
const { machineId, webAppUrl } = getSettings().plex;

View File

@@ -1,12 +1,6 @@
import { getMetadataProvider } from '@server/api/metadata';
import type { SonarrSeries } from '@server/api/servarr/sonarr';
import SonarrAPI from '@server/api/servarr/sonarr';
import TheMovieDb from '@server/api/themoviedb';
import { ANIME_KEYWORD_ID } from '@server/api/themoviedb/constants';
import type {
TmdbKeyword,
TmdbTvDetails,
} from '@server/api/themoviedb/interfaces';
import type { TmdbTvDetails } from '@server/api/themoviedb/interfaces';
import { getRepository } from '@server/datasource';
import Media from '@server/entity/Media';
import type {
@@ -108,15 +102,6 @@ class SonarrScanner
}
const tmdbId = tvShow.id;
const metadataProvider = tvShow.keywords?.results?.some(
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
)
? await getMetadataProvider('anime')
: await getMetadataProvider('tv');
if (!(metadataProvider instanceof TheMovieDb)) {
tvShow = await metadataProvider.getTvShow({ tvId: tmdbId });
}
const settings = getSettings();
const filteredSeasons = sonarrSeries.seasons.filter(

View File

@@ -45,7 +45,7 @@ class WatchlistSync {
[
Permission.AUTO_REQUEST,
Permission.AUTO_REQUEST_MOVIE,
Permission.AUTO_REQUEST_TV,
Permission.AUTO_APPROVE_TV,
],
{ type: 'or' }
)
@@ -70,33 +70,13 @@ class WatchlistSync {
response.items.map((i) => i.tmdbId)
);
const watchlistTmdbIds = response.items.map((i) => i.tmdbId);
const requestRepository = getRepository(MediaRequest);
const existingAutoRequests = await requestRepository
.createQueryBuilder('request')
.leftJoinAndSelect('request.media', 'media')
.where('request.requestedBy = :userId', { userId: user.id })
.andWhere('request.isAutoRequest = true')
.andWhere('media.tmdbId IN (:...tmdbIds)', { tmdbIds: watchlistTmdbIds })
.getMany();
const autoRequestedTmdbIds = new Set(
existingAutoRequests
.filter((r) => r.media != null)
.map((r) => `${r.media.mediaType}:${r.media.tmdbId}`)
);
const unavailableItems = response.items.filter(
// If we can find watchlist items in our database that are also available, we should exclude them
(i) =>
!autoRequestedTmdbIds.has(
`${i.type === 'show' ? MediaType.TV : MediaType.MOVIE}:${i.tmdbId}`
) &&
!mediaItems.find(
(m) =>
m.tmdbId === i.tmdbId &&
(m.status === MediaStatus.BLOCKLISTED ||
(m.status !== MediaStatus.UNKNOWN && m.mediaType === 'movie') ||
((m.status !== MediaStatus.UNKNOWN && m.mediaType === 'movie') ||
(m.mediaType === 'tv' && m.status === MediaStatus.AVAILABLE))
)
);

View File

@@ -174,12 +174,7 @@ mediaRoutes.delete(
where: { id: Number(req.params.id) },
});
if (media.status === MediaStatus.BLOCKLISTED) {
media.resetServiceData();
await mediaRepository.save(media);
} else {
await mediaRepository.remove(media);
}
await mediaRepository.remove(media);
return res.status(204).send();
} catch (e) {

View File

@@ -0,0 +1,182 @@
/* eslint-disable no-console */
import fs from 'fs';
import path from 'path';
import type { TlsOptions } from 'tls';
import {
DataSource,
type DataSourceOptions,
type ObjectLiteral,
} from 'typeorm';
const DB_SSL_PREFIX = 'DB_SSL_';
function boolFromEnv(envVar: string, defaultVal = false) {
if (process.env[envVar]) {
return process.env[envVar]?.toLowerCase() === 'true';
}
return defaultVal;
}
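// Return the env var value directly, or read it from the file referenced by the
// matching `<VAR>_FILE` variable.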
function stringOrReadFileFromEnv(envVar: string): Buffer | string | undefined {
if (process.env[envVar]) {
return process.env[envVar];
}
const filePath = process.env[`${envVar}_FILE`];
if (filePath) {
return fs.readFileSync(filePath);
}
return undefined;
}
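// Build TLS options for the Postgres connection; returns undefined unless
// DB_USE_SSL is set to 'true'.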
function buildSslConfig(): TlsOptions | undefined {
if (process.env.DB_USE_SSL?.toLowerCase() !== 'true') {
return undefined;
}
return {
rejectUnauthorized: boolFromEnv(
`${DB_SSL_PREFIX}REJECT_UNAUTHORIZED`,
true
),
ca: stringOrReadFileFromEnv(`${DB_SSL_PREFIX}CA`),
key: stringOrReadFileFromEnv(`${DB_SSL_PREFIX}KEY`),
cert: stringOrReadFileFromEnv(`${DB_SSL_PREFIX}CERT`),
};
}
const prodConfig: DataSourceOptions = {
type: 'sqlite',
database: process.env.CONFIG_DIRECTORY
? `${process.env.CONFIG_DIRECTORY}/db/db.sqlite3`
: 'config/db/db.sqlite3',
synchronize: false,
migrationsRun: false,
logging: boolFromEnv('DB_LOG_QUERIES'),
enableWAL: true,
// entities: ['dist/entity/**/*.js'],
migrations: ['dist/migration/sqlite/**/*.js'],
subscribers: ['dist/subscriber/**/*.js'],
};
const postgresProdConfig: DataSourceOptions = {
type: 'postgres',
host: process.env.DB_SOCKET_PATH || process.env.DB_HOST,
port: process.env.DB_SOCKET_PATH
? undefined
: parseInt(process.env.DB_PORT ?? '5432'),
username: process.env.DB_USER,
password: process.env.DB_PASS,
database: process.env.DB_NAME ?? 'seerr',
ssl: buildSslConfig(),
synchronize: false,
migrationsRun: true,
logging: boolFromEnv('DB_LOG_QUERIES'),
// entities: ['dist/entity/**/*.js'],
migrations: ['dist/migration/postgres/**/*.js'],
subscribers: ['dist/subscriber/**/*.js'],
};
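// Reload the compiled entity modules for the given DB_TYPE. Clearing the require
// cache forces a fresh evaluation so any DB-type-specific column definitions are
// picked up before each DataSource is created.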
async function loadEntities(type: 'sqlite' | 'postgres') {
process.env.DB_TYPE = type;
Object.keys(require.cache).forEach((key) => {
if (key.includes(path.join(__dirname, '../../dist'))) {
delete require.cache[key];
}
});
const entities = await Promise.all(
fs
.readdirSync(path.join(__dirname, '../../dist/entity'))
.filter((file) => file.endsWith('.js'))
.map((file) => {
/* eslint @typescript-eslint/no-var-requires: "off" */
const entityModule = require(
path.join(__dirname, '../../dist/entity', file)
);
return entityModule.default || entityModule[file.replace('.js', '')];
})
);
return entities;
}
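// Copy every entity's rows from the SQLite source into the Postgres target,
// table by table, with foreign key checks relaxed for the duration of the run.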
async function migrate() {
const sqliteEntities = await loadEntities('sqlite');
const sqliteDataSource = new DataSource({
entities: sqliteEntities,
...prodConfig,
});
await sqliteDataSource.initialize();
console.log('SQLite DataSource initialized.');
const postgresEntities = await loadEntities('postgres');
const postgresDataSource = new DataSource({
entities: postgresEntities,
...postgresProdConfig,
});
await postgresDataSource.initialize();
console.log('Postgres DataSource initialized.');
// create query runner and disable foreign key constraints for Postgres
const queryRunner = postgresDataSource.createQueryRunner();
await queryRunner.connect();
console.log('Disabling foreign key constraints...');
await queryRunner.query(`SET session_replication_role = 'replica';`);
try {
const entities = sqliteDataSource.entityMetadatas;
for (const entity of entities) {
const entityName = entity.name;
const tableName = entity.tableName;
console.log(`Migrating table: ${tableName} (${entityName})...`);
const sourceRepo = sqliteDataSource.getRepository(entityName);
// const targetRepo = postgresDataSource.getRepository(entityName);
const targetRepo = queryRunner.manager.getRepository(entityName);
const BATCH_SIZE = 1000;
let skip = 0;
let totalCount = 0;
let rows: ObjectLiteral[];
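// Page through the source table in fixed-size batches so memory use stays bounded.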
do {
rows = await sourceRepo.find({
take: BATCH_SIZE,
skip: skip,
loadEagerRelations: false,
loadRelationIds: true,
});
for (const row of rows) {
// set the Postgres ID sequence so the next generated value matches row.id, because TypeORM ignores the explicit ID field when saving
if (row.id && typeof row.id === 'number' && row.id > 1) {
await queryRunner.query(`
SELECT setval(pg_get_serial_sequence('${tableName}', 'id'), ${row.id - 1}, true);
`);
}
await targetRepo.save(row, {
transaction: false,
listeners: false,
reload: false,
});
}
skip += BATCH_SIZE;
totalCount += rows.length;
} while (rows.length !== 0);
console.log(` -> Copied ${totalCount} rows.`);
}
} catch (err) {
console.error('Migration failed:', err);
} finally {
console.log('Re-enabling foreign key constraints...');
await queryRunner.query(`SET session_replication_role = 'origin';`);
await queryRunner.release();
await sqliteDataSource.destroy();
await postgresDataSource.destroy();
console.log('Migration complete.');
}
}
migrate();
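
As a quick sanity check after the script prints 'Migration complete.', the target database's tables can be listed with psql. This is an editor's sketch, assuming psql is installed and the same `DB_*` values are in the environment:

```bash
PGPASSWORD="$DB_PASS" psql -h "$DB_HOST" -p "${DB_PORT:-5432}" -U "$DB_USER" -d "${DB_NAME:-seerr}" -c '\dt'
```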