Mirror of https://github.com/immich-app/immich.git (synced 2026-01-21 00:53:31 -08:00)

Compare commits: renovate/g... ... fix-cloud- (5 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 0f91def26f |  |
|  | 722fcb1f2a |  |
|  | b7ace1cc55 |  |
|  | 61a9d5cbc7 |  |
|  | ca0d4b283a |  |
.github/workflows/build-mobile.yml (vendored, 6 lines changed)
@@ -51,14 +51,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@4effdb9faa3c120fa03f487f0a476d55fc4cd9f2 # pre-job-action-v2.0.1
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -79,7 +79,7 @@ jobs:
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
.github/workflows/cache-cleanup.yml (vendored, 2 lines changed)
@@ -19,7 +19,7 @@ jobs:
actions: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
.github/workflows/cli.yml (vendored, 6 lines changed)
@@ -30,7 +30,7 @@ jobs:
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -44,7 +44,7 @@ jobs:
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
@@ -72,7 +72,7 @@ jobs:
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
.github/workflows/close-duplicates.yml (vendored, 2 lines changed)
@@ -35,7 +35,7 @@ jobs:
needs: [get_body, should_run]
if: ${{ needs.should_run.outputs.should_run == 'true' }}
container:
image: ghcr.io/immich-app/mdq:main@sha256:49f1603397bf9496840162b996e12b089d7d78d7e8b1880ba45545173a1104bf
image: ghcr.io/immich-app/mdq:main@sha256:ab9f163cd5d5cec42704a26ca2769ecf3f10aa8e7bae847f1d527cdf075946e6
outputs:
checked: ${{ steps.get_checkbox.outputs.checked }}
steps:
.github/workflows/codeql-analysis.yml (vendored, 8 lines changed)
@@ -44,7 +44,7 @@ jobs:
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -57,7 +57,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -70,7 +70,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
uses: github/codeql-action/autobuild@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -83,6 +83,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: '/language:${{matrix.language}}'
.github/workflows/docker.yml (vendored, 8 lines changed)
@@ -23,14 +23,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@4effdb9faa3c120fa03f487f0a476d55fc4cd9f2 # pre-job-action-v2.0.1
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -132,7 +132,7 @@ jobs:
suffixes: '-rocm'
platforms: linux/amd64
runner-mapping: '{"linux/amd64": "mich"}'
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@fe13f89d76bce06e0f40ec521b074f77bcf05d12 # multi-runner-build-workflow-v2.2.0
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@0477486d82313fba68f7c82c034120a4b8981297 # multi-runner-build-workflow-v2.1.0
permissions:
contents: read
actions: read
@@ -155,7 +155,7 @@ jobs:
name: Build and Push Server
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@fe13f89d76bce06e0f40ec521b074f77bcf05d12 # multi-runner-build-workflow-v2.2.0
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@0477486d82313fba68f7c82c034120a4b8981297 # multi-runner-build-workflow-v2.1.0
permissions:
contents: read
actions: read
.github/workflows/docs-build.yml (vendored, 8 lines changed)
@@ -21,14 +21,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@4effdb9faa3c120fa03f487f0a476d55fc4cd9f2 # pre-job-action-v2.0.1
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -54,7 +54,7 @@ jobs:
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -70,7 +70,7 @@ jobs:
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './docs/.nvmrc'
cache: 'pnpm'
.github/workflows/docs-deploy.yml (vendored, 4 lines changed)
@@ -20,7 +20,7 @@ jobs:
artifact: ${{ steps.get-artifact.outputs.result }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -119,7 +119,7 @@ jobs:
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
.github/workflows/docs-destroy.yml (vendored, 2 lines changed)
@@ -17,7 +17,7 @@ jobs:
pull-requests: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
.github/workflows/fix-format.yml (vendored, 2 lines changed)
@@ -32,7 +32,7 @@ jobs:
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
.github/workflows/pr-label-validation.yml (vendored, 2 lines changed)
@@ -14,7 +14,7 @@ jobs:
pull-requests: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
.github/workflows/pr-labeler.yml (vendored, 2 lines changed)
@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
.github/workflows/prepare-release.yml (vendored, 4 lines changed)
@@ -63,13 +63,13 @@ jobs:
ref: main
- name: Install uv
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
.github/workflows/preview-label.yaml (vendored, 4 lines changed)
@@ -14,7 +14,7 @@ jobs:
pull-requests: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -32,7 +32,7 @@ jobs:
pull-requests: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
.github/workflows/release-pr.yml (vendored, 4 lines changed)
@@ -30,13 +30,13 @@ jobs:
ref: main
- name: Install uv
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
.github/workflows/sdk.yml (vendored, 4 lines changed)
@@ -17,7 +17,7 @@ jobs:
working-directory: ./open-api/typescript-sdk
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -31,7 +31,7 @@ jobs:
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
.github/workflows/static_analysis.yml (vendored, 6 lines changed)
@@ -20,14 +20,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@4effdb9faa3c120fa03f487f0a476d55fc4cd9f2 # pre-job-action-v2.0.1
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -49,7 +49,7 @@ jobs:
working-directory: ./mobile
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
.github/workflows/test.yml (vendored, 64 lines changed)
@@ -17,14 +17,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@4effdb9faa3c120fa03f487f0a476d55fc4cd9f2 # pre-job-action-v2.0.1
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -63,7 +63,7 @@ jobs:
working-directory: ./server
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -77,7 +77,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
@@ -108,7 +108,7 @@ jobs:
working-directory: ./cli
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -121,7 +121,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
@@ -155,7 +155,7 @@ jobs:
working-directory: ./cli
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -168,7 +168,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
@@ -197,7 +197,7 @@ jobs:
working-directory: ./web
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -210,7 +210,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
@@ -241,7 +241,7 @@ jobs:
working-directory: ./web
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -254,7 +254,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
@@ -279,7 +279,7 @@ jobs:
contents: read
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -292,7 +292,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
@@ -327,7 +327,7 @@ jobs:
working-directory: ./e2e
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -340,7 +340,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
@@ -373,7 +373,7 @@ jobs:
working-directory: ./server
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -387,7 +387,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
@@ -412,7 +412,7 @@ jobs:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -426,7 +426,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
@@ -467,7 +467,7 @@ jobs:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -481,7 +481,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
@@ -529,7 +529,7 @@ jobs:
contents: read
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -561,7 +561,7 @@ jobs:
working-directory: ./machine-learning
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -571,7 +571,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Install uv
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
python-version: 3.11
- name: Install dependencies
@@ -601,7 +601,7 @@ jobs:
working-directory: ./.github
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -614,7 +614,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './.github/.nvmrc'
cache: 'pnpm'
@@ -631,7 +631,7 @@ jobs:
contents: read
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -652,7 +652,7 @@ jobs:
contents: read
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -665,7 +665,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
@@ -714,7 +714,7 @@ jobs:
working-directory: ./server
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -727,7 +727,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
.github/workflows/weblate-lock.yml (vendored, 6 lines changed)
@@ -24,14 +24,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@4effdb9faa3c120fa03f487f0a476d55fc4cd9f2 # pre-job-action-v2.0.1
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -47,7 +47,7 @@ jobs:
if: ${{ fromJSON(needs.pre-job.outputs.should_run).i18n == true }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
e2e/src/api/specs/database-backups.e2e-spec.ts (new file, 350 lines)
@@ -0,0 +1,350 @@
import { LoginResponseDto, ManualJobName } from '@immich/sdk';
import { errorDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it } from 'vitest';

describe('/admin/database-backups', () => {
let cookie: string | undefined;
let admin: LoginResponseDto;

beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
await utils.resetBackups(admin.accessToken);
});

describe('GET /', async () => {
it('should succeed and be empty', async () => {
const { status, body } = await request(app)
.get('/admin/database-backups')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
backups: [],
});
});

it('should contain a created backup', async () => {
await utils.createJob(admin.accessToken, {
name: ManualJobName.BackupDatabase,
});

await utils.waitForQueueFinish(admin.accessToken, 'backupDatabase');

await expect
.poll(
async () => {
const { status, body } = await request(app)
.get('/admin/database-backups')
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
return body;
},
{
interval: 500,
timeout: 10_000,
},
)
.toEqual(
expect.objectContaining({
backups: [
expect.objectContaining({
filename: expect.stringMatching(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/),
filesize: expect.any(Number),
}),
],
}),
);
});
});

describe('DELETE /', async () => {
it('should delete backup', async () => {
const filename = await utils.createBackup(admin.accessToken);

const { status } = await request(app)
.delete(`/admin/database-backups`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ backups: [filename] });

expect(status).toBe(200);

const { status: listStatus, body } = await request(app)
.get('/admin/database-backups')
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(listStatus).toBe(200);
expect(body).toEqual(
expect.objectContaining({
backups: [],
}),
);
});
});

// => action: restore database flow

describe.sequential('POST /start-restore', () => {
afterAll(async () => {
await request(app).post('/admin/maintenance').set('cookie', cookie!).send({ action: 'end' });
await utils.poll(
() => request(app).get('/server/config'),
({ status, body }) => status === 200 && !body.maintenanceMode,
);

admin = await utils.adminSetup();
});

it.sequential('should not work when the server is configured', async () => {
const { status, body } = await request(app).post('/admin/database-backups/start-restore').send();

expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('The server already has an admin'));
});

it.sequential('should enter maintenance mode in "database restore mode"', async () => {
await utils.resetDatabase(); // reset database before running this test

const { status, headers } = await request(app).post('/admin/database-backups/start-restore').send();

expect(status).toBe(201);

cookie = headers['set-cookie'][0].split(';')[0];

await expect
.poll(
async () => {
const { status, body } = await request(app).get('/server/config');
expect(status).toBe(200);
return body.maintenanceMode;
},
{
interval: 500,
timeout: 10_000,
},
)
.toBeTruthy();

const { status: status2, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
expect(status2).toBe(200);
expect(body).toEqual({
active: true,
action: 'select_database_restore',
});
});
});

// => action: restore database

describe.sequential('POST /backups/restore', () => {
beforeAll(async () => {
await utils.disconnectDatabase();
});

afterAll(async () => {
await utils.connectDatabase();
});

it.sequential('should restore a backup', { timeout: 60_000 }, async () => {
let filename = await utils.createBackup(admin.accessToken);

// work-around until test is running on released version
await utils.move(
`/data/backups/${filename}`,
'/data/backups/immich-db-backup-20260114T184016-v2.5.0-pg14.19.sql.gz',
);
filename = 'immich-db-backup-20260114T184016-v2.5.0-pg14.19.sql.gz';

const { status } = await request(app)
.post('/admin/maintenance')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({
action: 'restore_database',
restoreBackupFilename: filename,
});

expect(status).toBe(201);

await expect
.poll(
async () => {
const { status, body } = await request(app).get('/server/config');
expect(status).toBe(200);
return body.maintenanceMode;
},
{
interval: 500,
timeout: 10_000,
},
)
.toBeTruthy();

const { status: status2, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
expect(status2).toBe(200);
expect(body).toEqual(
expect.objectContaining({
active: true,
action: 'restore_database',
}),
);

await expect
.poll(
async () => {
const { status, body } = await request(app).get('/server/config');
expect(status).toBe(200);
return body.maintenanceMode;
},
{
interval: 500,
timeout: 60_000,
},
)
.toBeFalsy();
});

it.sequential('fail to restore a corrupted backup', { timeout: 60_000 }, async () => {
await utils.prepareTestBackup('corrupted');

const { status, headers } = await request(app)
.post('/admin/maintenance')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({
action: 'restore_database',
restoreBackupFilename: 'development-corrupted.sql.gz',
});

expect(status).toBe(201);
cookie = headers['set-cookie'][0].split(';')[0];

await expect
.poll(
async () => {
const { status, body } = await request(app).get('/server/config');
expect(status).toBe(200);
return body.maintenanceMode;
},
{
interval: 500,
timeout: 10_000,
},
)
.toBeTruthy();

await expect
.poll(
async () => {
const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
expect(status).toBe(200);
return body;
},
{
interval: 500,
timeout: 10_000,
},
)
.toEqual(
expect.objectContaining({
active: true,
action: 'restore_database',
error: 'Something went wrong, see logs!',
}),
);

const { status: status2, body: body2 } = await request(app)
.get('/admin/maintenance/status')
.set('cookie', cookie!)
.send({ token: 'token' });
expect(status2).toBe(200);
expect(body2).toEqual(
expect.objectContaining({
active: true,
action: 'restore_database',
error: expect.stringContaining('IM CORRUPTED'),
}),
);

await request(app).post('/admin/maintenance').set('cookie', cookie!).send({
action: 'end',
});

await utils.poll(
() => request(app).get('/server/config'),
({ status, body }) => status === 200 && !body.maintenanceMode,
);
});

it.sequential('rollback to restore point if backup is missing admin', { timeout: 60_000 }, async () => {
await utils.prepareTestBackup('empty');

const { status, headers } = await request(app)
.post('/admin/maintenance')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({
action: 'restore_database',
restoreBackupFilename: 'development-empty.sql.gz',
});

expect(status).toBe(201);
cookie = headers['set-cookie'][0].split(';')[0];

await expect
.poll(
async () => {
const { status, body } = await request(app).get('/server/config');
expect(status).toBe(200);
return body.maintenanceMode;
},
{
interval: 500,
timeout: 10_000,
},
)
.toBeTruthy();

await expect
.poll(
async () => {
const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
expect(status).toBe(200);
return body;
},
{
interval: 500,
timeout: 30_000,
},
)
.toEqual(
expect.objectContaining({
active: true,
action: 'restore_database',
error: 'Something went wrong, see logs!',
}),
);

const { status: status2, body: body2 } = await request(app)
.get('/admin/maintenance/status')
.set('cookie', cookie!)
.send({ token: 'token' });
expect(status2).toBe(200);
expect(body2).toEqual(
expect.objectContaining({
active: true,
action: 'restore_database',
error: expect.stringContaining('Server health check failed, no admin exists.'),
}),
);

await request(app).post('/admin/maintenance').set('cookie', cookie!).send({
action: 'end',
});

await utils.poll(
() => request(app).get('/server/config'),
({ status, body }) => status === 200 && !body.maintenanceMode,
);
});
});
});
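The spec above repeats the same wait in several places: poll `/server/config` until `maintenanceMode` flips. A hedged sketch of how that wait could be factored into a small helper (illustrative only, not part of this change; it assumes the same `app` export and Vitest's `expect.poll`):

```ts
import { expect } from 'vitest';
import request from 'supertest';
import { app } from 'src/utils';

// Poll /server/config until maintenanceMode matches the expected value (or the timeout elapses).
const waitForMaintenanceMode = async (expected: boolean, timeout = 10_000) => {
  await expect
    .poll(
      async () => {
        const { status, body } = await request(app).get('/server/config');
        expect(status).toBe(200);
        return body.maintenanceMode;
      },
      { interval: 500, timeout },
    )
    .toBe(expected);
};
```

With such a helper, each `await expect.poll(...).toBeTruthy()` / `.toBeFalsy()` pair in the tests above could read as `await waitForMaintenanceMode(true)` or `await waitForMaintenanceMode(false)`.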
@@ -14,6 +14,7 @@ describe('/admin/maintenance', () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);
await utils.resetBackups(admin.accessToken);
});

// => outside of maintenance mode
@@ -26,6 +27,17 @@ describe('/admin/maintenance', () => {
});
});

describe('GET /status', async () => {
it('to always indicate we are not in maintenance mode', async () => {
const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
expect(status).toBe(200);
expect(body).toEqual({
active: false,
action: 'end',
});
});
});

describe('POST /login', async () => {
it('should not work out of maintenance mode', async () => {
const { status, body } = await request(app).post('/admin/maintenance/login').send({ token: 'token' });
@@ -39,6 +51,7 @@ describe('/admin/maintenance', () => {
describe.sequential('POST /', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/admin/maintenance').send({
active: false,
action: 'end',
});
expect(status).toBe(401);
@@ -69,6 +82,7 @@ describe('/admin/maintenance', () => {
.send({
action: 'start',
});

expect(status).toBe(201);

cookie = headers['set-cookie'][0].split(';')[0];
@@ -79,12 +93,13 @@ describe('/admin/maintenance', () => {
await expect
.poll(
async () => {
const { body } = await request(app).get('/server/config');
const { status, body } = await request(app).get('/server/config');
expect(status).toBe(200);
return body.maintenanceMode;
},
{
interval: 5e2,
timeout: 1e4,
interval: 500,
timeout: 10_000,
},
)
.toBeTruthy();
@@ -102,6 +117,17 @@ describe('/admin/maintenance', () => {
});
});

describe('GET /status', async () => {
it('to indicate we are in maintenance mode', async () => {
const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
expect(status).toBe(200);
expect(body).toEqual({
active: true,
action: 'start',
});
});
});

describe('POST /login', async () => {
it('should fail without cookie or token in body', async () => {
const { status, body } = await request(app).post('/admin/maintenance/login').send({});
@@ -158,12 +184,13 @@ describe('/admin/maintenance', () => {
await expect
.poll(
async () => {
const { body } = await request(app).get('/server/config');
const { status, body } = await request(app).get('/server/config');
expect(status).toBe(200);
return body.maintenanceMode;
},
{
interval: 5e2,
timeout: 1e4,
interval: 500,
timeout: 10_000,
},
)
.toBeFalsy();
e2e/src/utils.ts (101 lines changed)
@@ -6,7 +6,9 @@ import {
CheckExistingAssetsDto,
CreateAlbumDto,
CreateLibraryDto,
JobCreateDto,
MaintenanceAction,
ManualJobName,
MetadataSearchDto,
Permission,
PersonCreateDto,
@@ -21,6 +23,7 @@ import {
checkExistingAssets,
createAlbum,
createApiKey,
createJob,
createLibrary,
createPartner,
createPerson,
@@ -28,10 +31,12 @@ import {
createStack,
createUserAdmin,
deleteAssets,
deleteDatabaseBackup,
getAssetInfo,
getConfig,
getConfigDefaults,
getQueuesLegacy,
listDatabaseBackups,
login,
runQueueCommandLegacy,
scanLibrary,
@@ -52,11 +57,15 @@ import {
import { BrowserContext } from '@playwright/test';
import { exec, spawn } from 'node:child_process';
import { createHash } from 'node:crypto';
import { existsSync, mkdirSync, renameSync, rmSync, writeFileSync } from 'node:fs';
import { createWriteStream, existsSync, mkdirSync, renameSync, rmSync, writeFileSync } from 'node:fs';
import { mkdtemp } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { dirname, resolve } from 'node:path';
import { dirname, join, resolve } from 'node:path';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { setTimeout as setAsyncTimeout } from 'node:timers/promises';
import { promisify } from 'node:util';
import { createGzip } from 'node:zlib';
import pg from 'pg';
import { io, type Socket } from 'socket.io-client';
import { loginDto, signupDto } from 'src/fixtures';
@@ -84,8 +93,9 @@ export const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer $
export const asKeyAuth = (key: string) => ({ 'x-api-key': key });
export const immichCli = (args: string[]) =>
executeCommand('pnpm', ['exec', 'immich', '-d', `/${tempDir}/immich/`, ...args], { cwd: '../cli' }).promise;
export const immichAdmin = (args: string[]) =>
executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', `immich-admin ${args.join(' ')}`]);
export const dockerExec = (args: string[]) =>
executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', args.join(' ')]);
export const immichAdmin = (args: string[]) => dockerExec([`immich-admin ${args.join(' ')}`]);
export const specialCharStrings = ["'", '"', ',', '{', '}', '*'];
export const TEN_TIMES = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

@@ -149,12 +159,26 @@ const onEvent = ({ event, id }: { event: EventType; id: string }) => {
};

export const utils = {
connectDatabase: async () => {
if (!client) {
client = new pg.Client(dbUrl);
client.on('end', () => (client = null));
client.on('error', () => (client = null));
await client.connect();
}

return client;
},

disconnectDatabase: async () => {
if (client) {
await client.end();
}
},

resetDatabase: async (tables?: string[]) => {
try {
if (!client) {
client = new pg.Client(dbUrl);
await client.connect();
}
client = await utils.connectDatabase();

tables = tables || [
// TODO e2e test for deleting a stack, since it is quite complex
@@ -481,6 +505,9 @@ export const utils = {
tagAssets: (accessToken: string, tagId: string, assetIds: string[]) =>
tagAssets({ id: tagId, bulkIdsDto: { ids: assetIds } }, { headers: asBearerAuth(accessToken) }),

createJob: async (accessToken: string, jobCreateDto: JobCreateDto) =>
createJob({ jobCreateDto }, { headers: asBearerAuth(accessToken) }),

queueCommand: async (accessToken: string, name: QueueName, queueCommandDto: QueueCommandDto) =>
runQueueCommandLegacy({ name, queueCommandDto }, { headers: asBearerAuth(accessToken) }),

@@ -559,6 +586,45 @@ export const utils = {
mkdirSync(`${testAssetDir}/temp`, { recursive: true });
},

async move(source: string, dest: string) {
return executeCommand('docker', ['exec', 'immich-e2e-server', 'mv', source, dest]).promise;
},

createBackup: async (accessToken: string) => {
await utils.createJob(accessToken, {
name: ManualJobName.BackupDatabase,
});

return utils.poll(
() => request(app).get('/admin/database-backups').set('Authorization', `Bearer ${accessToken}`),
({ status, body }) => status === 200 && body.backups.length === 1,
({ body }) => body.backups[0].filename,
);
},

resetBackups: async (accessToken: string) => {
const { backups } = await listDatabaseBackups({ headers: asBearerAuth(accessToken) });

const backupFiles = backups.map((b) => b.filename);
await deleteDatabaseBackup(
{ databaseBackupDeleteDto: { backups: backupFiles } },
{ headers: asBearerAuth(accessToken) },
);
},

prepareTestBackup: async (generate: 'empty' | 'corrupted') => {
const dir = await mkdtemp(join(tmpdir(), 'test-'));
const fn = join(dir, 'file');

const sql = Readable.from(generate === 'corrupted' ? 'IM CORRUPTED;' : 'SELECT 1;');
const gzip = createGzip();
const writeStream = createWriteStream(fn);
await pipeline(sql, gzip, writeStream);

await executeCommand('docker', ['cp', fn, `immich-e2e-server:/data/backups/development-${generate}.sql.gz`])
.promise;
},

resetAdminConfig: async (accessToken: string) => {
const defaultConfig = await getConfigDefaults({ headers: asBearerAuth(accessToken) });
await updateConfig({ systemConfigDto: defaultConfig }, { headers: asBearerAuth(accessToken) });
@@ -601,6 +667,25 @@ export const utils = {
await utils.waitForQueueFinish(accessToken, 'sidecar');
await utils.waitForQueueFinish(accessToken, 'metadataExtraction');
},

async poll<T>(cb: () => Promise<T>, validate: (value: T) => boolean, map?: (value: T) => any) {
let timeout = 0;
while (true) {
try {
const data = await cb();
if (validate(data)) {
return map ? map(data) : data;
}
timeout++;
if (timeout >= 10) {
throw 'Could not clean up test.';
}
await new Promise((resolve) => setTimeout(resolve, 5e2));
} catch {
// no-op
}
}
},
};

utils.initSdk();
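For orientation, a minimal sketch of how the new `utils.createBackup` and `utils.poll` helpers added above are meant to be combined (assumptions: the same `app`, `request`, and admin access token used in the specs; this example is not part of the diff):

```ts
import { app, utils } from 'src/utils';
import request from 'supertest';

const example = async (accessToken: string) => {
  // createBackup queues the BackupDatabase job and resolves with the backup's filename
  // once the admin listing endpoint reports exactly one backup.
  const filename = await utils.createBackup(accessToken);

  // poll retries the callback (up to ten attempts, 500 ms apart) until the predicate passes,
  // optionally mapping the last response to a return value.
  const maintenanceMode = await utils.poll(
    () => request(app).get('/server/config'),
    ({ status, body }) => status === 200 && !body.maintenanceMode,
    ({ body }) => body.maintenanceMode,
  );

  return { filename, maintenanceMode };
};
```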
e2e/src/web/specs/database-backups.e2e-spec.ts (new file, 105 lines)
@@ -0,0 +1,105 @@
import { LoginResponseDto } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import { utils } from 'src/utils';

test.describe.configure({ mode: 'serial' });

test.describe('Database Backups', () => {
let admin: LoginResponseDto;

test.beforeAll(async () => {
utils.initSdk();
await utils.resetDatabase();
admin = await utils.adminSetup();
});

test('restore a backup from settings', async ({ context, page }) => {
test.setTimeout(60_000);

await utils.resetBackups(admin.accessToken);
const filename = await utils.createBackup(admin.accessToken);
await utils.setAuthCookies(context, admin.accessToken);

// work-around until test is running on released version
await utils.move(
`/data/backups/${filename}`,
'/data/backups/immich-db-backup-20260114T184016-v2.5.0-pg14.19.sql.gz',
);

await page.goto('/admin/maintenance?isOpen=backups');
await page.getByRole('button', { name: 'Restore', exact: true }).click();
await page.getByRole('dialog').getByRole('button', { name: 'Restore' }).click();

await page.waitForURL('/maintenance?**');
await page.waitForURL('/admin/maintenance**', { timeout: 60_000 });
});

test('handle backup restore failure', async ({ context, page }) => {
test.setTimeout(60_000);

await utils.resetBackups(admin.accessToken);
await utils.prepareTestBackup('corrupted');
await utils.setAuthCookies(context, admin.accessToken);

await page.goto('/admin/maintenance?isOpen=backups');
await page.getByRole('button', { name: 'Restore', exact: true }).click();
await page.getByRole('dialog').getByRole('button', { name: 'Restore' }).click();

await page.waitForURL('/maintenance?**');
await expect(page.getByText('IM CORRUPTED')).toBeVisible({ timeout: 60_000 });
await page.getByRole('button', { name: 'End maintenance mode' }).click();
await page.waitForURL('/admin/maintenance**');
});

test('rollback to restore point if backup is missing admin', async ({ context, page }) => {
test.setTimeout(60_000);

await utils.resetBackups(admin.accessToken);
await utils.prepareTestBackup('empty');
await utils.setAuthCookies(context, admin.accessToken);

await page.goto('/admin/maintenance?isOpen=backups');
await page.getByRole('button', { name: 'Restore', exact: true }).click();
await page.getByRole('dialog').getByRole('button', { name: 'Restore' }).click();

await page.waitForURL('/maintenance?**');
await expect(page.getByText('Server health check failed, no admin exists.')).toBeVisible({ timeout: 60_000 });
await page.getByRole('button', { name: 'End maintenance mode' }).click();
await page.waitForURL('/admin/maintenance**');
});

test('restore a backup from onboarding', async ({ context, page }) => {
test.setTimeout(60_000);

await utils.resetBackups(admin.accessToken);
const filename = await utils.createBackup(admin.accessToken);
await utils.setAuthCookies(context, admin.accessToken);

// work-around until test is running on released version
await utils.move(
`/data/backups/${filename}`,
'/data/backups/immich-db-backup-20260114T184016-v2.5.0-pg14.19.sql.gz',
);

await utils.resetDatabase();

await page.goto('/');
await page.getByRole('button', { name: 'Restore from backup' }).click();

try {
await page.waitForURL('/maintenance**');
} catch {
// when chained with the rest of the tests
// this navigation may fail..? not sure why...
await page.goto('/maintenance');
await page.waitForURL('/maintenance**');
}

await page.getByRole('button', { name: 'Next' }).click();
await page.getByRole('button', { name: 'Restore', exact: true }).click();
await page.getByRole('dialog').getByRole('button', { name: 'Restore' }).click();

await page.waitForURL('/maintenance?**');
await page.waitForURL('/photos', { timeout: 60_000 });
});
});
@@ -16,12 +16,12 @@ test.describe('Maintenance', () => {
  test('enter and exit maintenance mode', async ({ context, page }) => {
    await utils.setAuthCookies(context, admin.accessToken);

-   await page.goto('/admin/system-settings?isOpen=maintenance');
-   await page.getByRole('button', { name: 'Start maintenance mode' }).click();
+   await page.goto('/admin/maintenance');
+   await page.getByRole('button', { name: 'Switch to maintenance mode' }).click();

    await expect(page.getByText('Temporarily Unavailable')).toBeVisible({ timeout: 10_000 });
    await page.getByRole('button', { name: 'End maintenance mode' }).click();
-   await page.waitForURL('**/admin/system-settings*', { timeout: 10_000 });
+   await page.waitForURL('**/admin/maintenance*', { timeout: 10_000 });
  });

  test('maintenance shows no options to users until they authenticate', async ({ page }) => {
i18n/en.json (32 lines changed)
@@ -188,10 +188,21 @@
"machine_learning_smart_search_enabled": "Enable smart search",
|
||||
"machine_learning_smart_search_enabled_description": "If disabled, images will not be encoded for smart search.",
|
||||
"machine_learning_url_description": "The URL of the machine learning server. If more than one URL is provided, each server will be attempted one-at-a-time until one responds successfully, in order from first to last. Servers that don't respond will be temporarily ignored until they come back online.",
|
||||
"maintenance_delete_backup": "Delete Backup",
|
||||
"maintenance_delete_backup_description": "This file will be irrevocably deleted.",
|
||||
"maintenance_delete_error": "Failed to delete backup.",
|
||||
"maintenance_restore_backup": "Restore Backup",
|
||||
"maintenance_restore_backup_description": "Immich will be wiped and restored from the chosen backup. A backup will be created before continuing.",
|
||||
"maintenance_restore_backup_different_version": "This backup was created with a different version of Immich!",
|
||||
"maintenance_restore_backup_unknown_version": "Couldn't determine backup version.",
|
||||
"maintenance_restore_database_backup": "Restore database backup",
|
||||
"maintenance_restore_database_backup_description": "Rollback to an earlier database state using a backup file",
|
||||
"maintenance_settings": "Maintenance",
|
||||
"maintenance_settings_description": "Put Immich into maintenance mode.",
|
||||
"maintenance_start": "Start maintenance mode",
|
||||
"maintenance_start": "Switch to maintenance mode",
|
||||
"maintenance_start_error": "Failed to start maintenance mode.",
|
||||
"maintenance_upload_backup": "Upload database backup file",
|
||||
"maintenance_upload_backup_error": "Could not upload backup, is it an .sql/.sql.gz file?",
|
||||
"manage_concurrency": "Manage Concurrency",
|
||||
"manage_concurrency_description": "Navigate to the jobs page to manage job concurrency",
|
||||
"manage_log_settings": "Manage log settings",
|
||||
@@ -1404,10 +1415,28 @@
  "loop_videos_description": "Enable to automatically loop a video in the detail viewer.",
  "main_branch_warning": "You're using a development version; we strongly recommend using a release version!",
  "main_menu": "Main menu",
  "maintenance_action_restore": "Restoring Database",
  "maintenance_description": "Immich has been put into <link>maintenance mode</link>.",
  "maintenance_end": "End maintenance mode",
  "maintenance_end_error": "Failed to end maintenance mode.",
  "maintenance_logged_in_as": "Currently logged in as {user}",
  "maintenance_restore_from_backup": "Restore From Backup",
  "maintenance_restore_library": "Restore Your Library",
  "maintenance_restore_library_confirm": "If this looks correct, continue to restoring a backup!",
  "maintenance_restore_library_description": "Restoring Database",
  "maintenance_restore_library_folder_has_files": "{folder} has {count} folder(s)",
  "maintenance_restore_library_folder_no_files": "{folder} is missing files!",
  "maintenance_restore_library_folder_pass": "readable and writable",
  "maintenance_restore_library_folder_read_fail": "not readable",
  "maintenance_restore_library_folder_write_fail": "not writable",
  "maintenance_restore_library_hint_missing_files": "You may be missing important files",
  "maintenance_restore_library_hint_regenerate_later": "You can regenerate these later in settings",
  "maintenance_restore_library_hint_storage_template_missing_files": "Using storage template? You may be missing files",
  "maintenance_restore_library_loading": "Loading integrity checks and heuristics…",
  "maintenance_task_backup": "Creating a backup of the existing database…",
  "maintenance_task_migrations": "Running database migrations…",
  "maintenance_task_restore": "Restoring the chosen backup…",
  "maintenance_task_rollback": "Restore failed, rolling back to restore point…",
  "maintenance_title": "Temporarily Unavailable",
  "make": "Make",
  "manage_geolocation": "Manage location",
@@ -2215,6 +2244,7 @@
  "unhide_person": "Unhide person",
  "unknown": "Unknown",
  "unknown_country": "Unknown Country",
  "unknown_date": "Unknown date",
  "unknown_year": "Unknown Year",
  "unlimited": "Unlimited",
  "unlink_motion_video": "Unlink motion video",
@@ -307,10 +307,10 @@ class BackgroundUploadService {
      priority: priority,
      isFavorite: asset.isFavorite,
      requiresWiFi: requiresWiFi,
-     cloudId: asset.cloudId,
-     adjustmentTime: asset.adjustmentTime?.toIso8601String(),
-     latitude: asset.latitude?.toString(),
-     longitude: asset.longitude?.toString(),
+     cloudId: entity.isLivePhoto ? null : asset.cloudId,
+     adjustmentTime: entity.isLivePhoto ? null : asset.adjustmentTime?.toIso8601String(),
+     latitude: entity.isLivePhoto ? null : asset.latitude?.toString(),
+     longitude: entity.isLivePhoto ? null : asset.longitude?.toString(),
    );
  }

@@ -16,6 +16,7 @@ import 'package:immich_mobile/platform/connectivity_api.g.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/providers/infrastructure/storage.provider.dart';
import 'package:immich_mobile/repositories/asset_media.repository.dart';
import 'package:immich_mobile/repositories/upload.repository.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/services/app_settings.service.dart';

@@ -40,6 +41,7 @@ final foregroundUploadServiceProvider = Provider((ref) {
    ref.watch(backupRepositoryProvider),
    ref.watch(connectivityApiProvider),
    ref.watch(appSettingsServiceProvider),
    ref.watch(assetMediaRepositoryProvider),
  );
});

@@ -55,6 +57,7 @@ class ForegroundUploadService {
    this._backupRepository,
    this._connectivityApi,
    this._appSettingsService,
    this._assetMediaRepository,
  );

  final UploadRepository _uploadRepository;

@@ -62,6 +65,7 @@ class ForegroundUploadService {
  final DriftBackupRepository _backupRepository;
  final ConnectivityApi _connectivityApi;
  final AppSettingsService _appSettingsService;
  final AssetMediaRepository _assetMediaRepository;
  final Logger _logger = Logger('ForegroundUploadService');

  bool shouldAbortUpload = false;

@@ -311,7 +315,8 @@ class ForegroundUploadService {
      return;
    }

-   final originalFileName = entity.isLivePhoto ? p.setExtension(asset.name, p.extension(file.path)) : asset.name;
+   final fileName = await _assetMediaRepository.getOriginalFilename(asset.id) ?? asset.name;
+   final originalFileName = entity.isLivePhoto ? p.setExtension(fileName, p.extension(file.path)) : fileName;
    final deviceId = Store.get(StoreKey.deviceId);

    final headers = ApiService.getRequestHeaders();

@@ -322,19 +327,6 @@ class ForegroundUploadService {
      'fileModifiedAt': asset.updatedAt.toUtc().toIso8601String(),
      'isFavorite': asset.isFavorite.toString(),
      'duration': asset.duration.toString(),
-     if (CurrentPlatform.isIOS && asset.cloudId != null)
-       'metadata': jsonEncode([
-         RemoteAssetMetadataItem(
-           key: RemoteAssetMetadataKey.mobileApp,
-           value: RemoteAssetMobileAppMetadata(
-             cloudId: asset.cloudId,
-             createdAt: asset.createdAt.toIso8601String(),
-             adjustmentTime: asset.adjustmentTime?.toIso8601String(),
-             latitude: asset.latitude?.toString(),
-             longitude: asset.longitude?.toString(),
-           ),
-         ),
-       ]),
    };

    // Upload live photo video first if available

@@ -363,6 +355,22 @@ class ForegroundUploadService {
      fields['livePhotoVideoId'] = livePhotoVideoId;
    }

+   // Add cloudId metadata only to the still image, not the motion video, because when the cloudId sync happens, the motion video can get associated with the wrong still image.
+   if (CurrentPlatform.isIOS && asset.cloudId != null) {
+     fields['metadata'] = jsonEncode([
+       RemoteAssetMetadataItem(
+         key: RemoteAssetMetadataKey.mobileApp,
+         value: RemoteAssetMobileAppMetadata(
+           cloudId: asset.cloudId,
+           createdAt: asset.createdAt.toIso8601String(),
+           adjustmentTime: asset.adjustmentTime?.toIso8601String(),
+           latitude: asset.latitude?.toString(),
+           longitude: asset.longitude?.toString(),
+         ),
+       ),
+     ]);
+   }
+
    final result = await _uploadRepository.uploadFile(
      file: file,
      originalFileName: originalFileName,
mobile/openapi/README.md (generated, 14 lines changed)
@@ -138,6 +138,11 @@ Class | Method | HTTP request | Description
|
||||
*AuthenticationApi* | [**unlockAuthSession**](doc//AuthenticationApi.md#unlockauthsession) | **POST** /auth/session/unlock | Unlock auth session
|
||||
*AuthenticationApi* | [**validateAccessToken**](doc//AuthenticationApi.md#validateaccesstoken) | **POST** /auth/validateToken | Validate access token
|
||||
*AuthenticationAdminApi* | [**unlinkAllOAuthAccountsAdmin**](doc//AuthenticationAdminApi.md#unlinkalloauthaccountsadmin) | **POST** /admin/auth/unlink-all | Unlink all OAuth accounts
|
||||
*DatabaseBackupsAdminApi* | [**deleteDatabaseBackup**](doc//DatabaseBackupsAdminApi.md#deletedatabasebackup) | **DELETE** /admin/database-backups | Delete database backup
|
||||
*DatabaseBackupsAdminApi* | [**downloadDatabaseBackup**](doc//DatabaseBackupsAdminApi.md#downloaddatabasebackup) | **GET** /admin/database-backups/{filename} | Download database backup
|
||||
*DatabaseBackupsAdminApi* | [**listDatabaseBackups**](doc//DatabaseBackupsAdminApi.md#listdatabasebackups) | **GET** /admin/database-backups | List database backups
|
||||
*DatabaseBackupsAdminApi* | [**startDatabaseRestoreFlow**](doc//DatabaseBackupsAdminApi.md#startdatabaserestoreflow) | **POST** /admin/database-backups/start-restore | Start database backup restore flow
|
||||
*DatabaseBackupsAdminApi* | [**uploadDatabaseBackup**](doc//DatabaseBackupsAdminApi.md#uploaddatabasebackup) | **POST** /admin/database-backups/upload | Upload database backup
|
||||
*DeprecatedApi* | [**createPartnerDeprecated**](doc//DeprecatedApi.md#createpartnerdeprecated) | **POST** /partners/{id} | Create a partner
|
||||
*DeprecatedApi* | [**getAllUserAssetsByDeviceId**](doc//DeprecatedApi.md#getalluserassetsbydeviceid) | **GET** /assets/device/{deviceId} | Retrieve assets by device ID
|
||||
*DeprecatedApi* | [**getDeltaSync**](doc//DeprecatedApi.md#getdeltasync) | **POST** /sync/delta-sync | Get delta sync for user
|
||||
@@ -166,6 +171,8 @@ Class | Method | HTTP request | Description
|
||||
*LibrariesApi* | [**scanLibrary**](doc//LibrariesApi.md#scanlibrary) | **POST** /libraries/{id}/scan | Scan a library
|
||||
*LibrariesApi* | [**updateLibrary**](doc//LibrariesApi.md#updatelibrary) | **PUT** /libraries/{id} | Update a library
|
||||
*LibrariesApi* | [**validate**](doc//LibrariesApi.md#validate) | **POST** /libraries/{id}/validate | Validate library settings
|
||||
*MaintenanceAdminApi* | [**detectPriorInstall**](doc//MaintenanceAdminApi.md#detectpriorinstall) | **GET** /admin/maintenance/detect-install | Detect existing install
|
||||
*MaintenanceAdminApi* | [**getMaintenanceStatus**](doc//MaintenanceAdminApi.md#getmaintenancestatus) | **GET** /admin/maintenance/status | Get maintenance mode status
|
||||
*MaintenanceAdminApi* | [**maintenanceLogin**](doc//MaintenanceAdminApi.md#maintenancelogin) | **POST** /admin/maintenance/login | Log into maintenance mode
|
||||
*MaintenanceAdminApi* | [**setMaintenanceMode**](doc//MaintenanceAdminApi.md#setmaintenancemode) | **POST** /admin/maintenance | Set maintenance mode
|
||||
*MapApi* | [**getMapMarkers**](doc//MapApi.md#getmapmarkers) | **GET** /map/markers | Retrieve map markers
|
||||
@@ -405,6 +412,9 @@ Class | Method | HTTP request | Description
|
||||
- [CreateProfileImageResponseDto](doc//CreateProfileImageResponseDto.md)
|
||||
- [CropParameters](doc//CropParameters.md)
|
||||
- [DatabaseBackupConfig](doc//DatabaseBackupConfig.md)
|
||||
- [DatabaseBackupDeleteDto](doc//DatabaseBackupDeleteDto.md)
|
||||
- [DatabaseBackupDto](doc//DatabaseBackupDto.md)
|
||||
- [DatabaseBackupListResponseDto](doc//DatabaseBackupListResponseDto.md)
|
||||
- [DownloadArchiveInfo](doc//DownloadArchiveInfo.md)
|
||||
- [DownloadInfoDto](doc//DownloadInfoDto.md)
|
||||
- [DownloadResponse](doc//DownloadResponse.md)
|
||||
@@ -434,7 +444,10 @@ Class | Method | HTTP request | Description
|
||||
- [MachineLearningAvailabilityChecksDto](doc//MachineLearningAvailabilityChecksDto.md)
|
||||
- [MaintenanceAction](doc//MaintenanceAction.md)
|
||||
- [MaintenanceAuthDto](doc//MaintenanceAuthDto.md)
|
||||
- [MaintenanceDetectInstallResponseDto](doc//MaintenanceDetectInstallResponseDto.md)
|
||||
- [MaintenanceDetectInstallStorageFolderDto](doc//MaintenanceDetectInstallStorageFolderDto.md)
|
||||
- [MaintenanceLoginDto](doc//MaintenanceLoginDto.md)
|
||||
- [MaintenanceStatusResponseDto](doc//MaintenanceStatusResponseDto.md)
|
||||
- [ManualJobName](doc//ManualJobName.md)
|
||||
- [MapMarkerResponseDto](doc//MapMarkerResponseDto.md)
|
||||
- [MapReverseGeocodeResponseDto](doc//MapReverseGeocodeResponseDto.md)
|
||||
@@ -550,6 +563,7 @@ Class | Method | HTTP request | Description
|
||||
- [StackResponseDto](doc//StackResponseDto.md)
|
||||
- [StackUpdateDto](doc//StackUpdateDto.md)
|
||||
- [StatisticsSearchDto](doc//StatisticsSearchDto.md)
|
||||
- [StorageFolder](doc//StorageFolder.md)
|
||||
- [SyncAckDeleteDto](doc//SyncAckDeleteDto.md)
|
||||
- [SyncAckDto](doc//SyncAckDto.md)
|
||||
- [SyncAckSetDto](doc//SyncAckSetDto.md)
|
||||
|
||||
mobile/openapi/lib/api.dart (generated, 8 lines changed)
@@ -36,6 +36,7 @@ part 'api/albums_api.dart';
|
||||
part 'api/assets_api.dart';
|
||||
part 'api/authentication_api.dart';
|
||||
part 'api/authentication_admin_api.dart';
|
||||
part 'api/database_backups_admin_api.dart';
|
||||
part 'api/deprecated_api.dart';
|
||||
part 'api/download_api.dart';
|
||||
part 'api/duplicates_api.dart';
|
||||
@@ -151,6 +152,9 @@ part 'model/create_library_dto.dart';
|
||||
part 'model/create_profile_image_response_dto.dart';
|
||||
part 'model/crop_parameters.dart';
|
||||
part 'model/database_backup_config.dart';
|
||||
part 'model/database_backup_delete_dto.dart';
|
||||
part 'model/database_backup_dto.dart';
|
||||
part 'model/database_backup_list_response_dto.dart';
|
||||
part 'model/download_archive_info.dart';
|
||||
part 'model/download_info_dto.dart';
|
||||
part 'model/download_response.dart';
|
||||
@@ -180,7 +184,10 @@ part 'model/logout_response_dto.dart';
|
||||
part 'model/machine_learning_availability_checks_dto.dart';
|
||||
part 'model/maintenance_action.dart';
|
||||
part 'model/maintenance_auth_dto.dart';
|
||||
part 'model/maintenance_detect_install_response_dto.dart';
|
||||
part 'model/maintenance_detect_install_storage_folder_dto.dart';
|
||||
part 'model/maintenance_login_dto.dart';
|
||||
part 'model/maintenance_status_response_dto.dart';
|
||||
part 'model/manual_job_name.dart';
|
||||
part 'model/map_marker_response_dto.dart';
|
||||
part 'model/map_reverse_geocode_response_dto.dart';
|
||||
@@ -296,6 +303,7 @@ part 'model/stack_create_dto.dart';
|
||||
part 'model/stack_response_dto.dart';
|
||||
part 'model/stack_update_dto.dart';
|
||||
part 'model/statistics_search_dto.dart';
|
||||
part 'model/storage_folder.dart';
|
||||
part 'model/sync_ack_delete_dto.dart';
|
||||
part 'model/sync_ack_dto.dart';
|
||||
part 'model/sync_ack_set_dto.dart';
|
||||
|
||||
mobile/openapi/lib/api/database_backups_admin_api.dart (generated, new file, 269 lines)
@@ -0,0 +1,269 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
|
||||
class DatabaseBackupsAdminApi {
|
||||
DatabaseBackupsAdminApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
|
||||
|
||||
final ApiClient apiClient;
|
||||
|
||||
/// Delete database backup
|
||||
///
|
||||
/// Delete a backup by its filename
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [DatabaseBackupDeleteDto] databaseBackupDeleteDto (required):
|
||||
Future<Response> deleteDatabaseBackupWithHttpInfo(DatabaseBackupDeleteDto databaseBackupDeleteDto,) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody = databaseBackupDeleteDto;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>['application/json'];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'DELETE',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Delete database backup
|
||||
///
|
||||
/// Delete a backup by its filename
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [DatabaseBackupDeleteDto] databaseBackupDeleteDto (required):
|
||||
Future<void> deleteDatabaseBackup(DatabaseBackupDeleteDto databaseBackupDeleteDto,) async {
|
||||
final response = await deleteDatabaseBackupWithHttpInfo(databaseBackupDeleteDto,);
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
}
|
||||
|
||||
/// Download database backup
|
||||
///
|
||||
/// Downloads the database backup file
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [String] filename (required):
|
||||
Future<Response> downloadDatabaseBackupWithHttpInfo(String filename,) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups/{filename}'
|
||||
.replaceAll('{filename}', filename);
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Download database backup
|
||||
///
|
||||
/// Downloads the database backup file
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [String] filename (required):
|
||||
Future<MultipartFile?> downloadDatabaseBackup(String filename,) async {
|
||||
final response = await downloadDatabaseBackupWithHttpInfo(filename,);
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MultipartFile',) as MultipartFile;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// List database backups
|
||||
///
|
||||
/// Get the list of the successful and failed backups
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
Future<Response> listDatabaseBackupsWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// List database backups
|
||||
///
|
||||
/// Get the list of the successful and failed backups
|
||||
Future<DatabaseBackupListResponseDto?> listDatabaseBackups() async {
|
||||
final response = await listDatabaseBackupsWithHttpInfo();
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'DatabaseBackupListResponseDto',) as DatabaseBackupListResponseDto;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Start database backup restore flow
|
||||
///
|
||||
/// Put Immich into maintenance mode to restore a backup (Immich must not be configured)
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
Future<Response> startDatabaseRestoreFlowWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups/start-restore';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'POST',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Start database backup restore flow
|
||||
///
|
||||
/// Put Immich into maintenance mode to restore a backup (Immich must not be configured)
|
||||
Future<void> startDatabaseRestoreFlow() async {
|
||||
final response = await startDatabaseRestoreFlowWithHttpInfo();
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
}
|
||||
|
||||
/// Upload database backup
|
||||
///
|
||||
/// Uploads .sql/.sql.gz file to restore backup from
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [MultipartFile] file:
|
||||
Future<Response> uploadDatabaseBackupWithHttpInfo({ MultipartFile? file, }) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups/upload';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>['multipart/form-data'];
|
||||
|
||||
bool hasFields = false;
|
||||
final mp = MultipartRequest('POST', Uri.parse(apiPath));
|
||||
if (file != null) {
|
||||
hasFields = true;
|
||||
mp.fields[r'file'] = file.field;
|
||||
mp.files.add(file);
|
||||
}
|
||||
if (hasFields) {
|
||||
postBody = mp;
|
||||
}
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'POST',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Upload database backup
|
||||
///
|
||||
/// Uploads .sql/.sql.gz file to restore backup from
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [MultipartFile] file:
|
||||
Future<void> uploadDatabaseBackup({ MultipartFile? file, }) async {
|
||||
final response = await uploadDatabaseBackupWithHttpInfo( file: file, );
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
}
|
||||
}
|
||||
mobile/openapi/lib/api/maintenance_admin_api.dart (generated, 96 lines changed)
@@ -16,6 +16,102 @@ class MaintenanceAdminApi {
|
||||
|
||||
final ApiClient apiClient;
|
||||
|
||||
/// Detect existing install
|
||||
///
|
||||
/// Collect integrity checks and other heuristics about local data.
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
Future<Response> detectPriorInstallWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/maintenance/detect-install';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Detect existing install
|
||||
///
|
||||
/// Collect integrity checks and other heuristics about local data.
|
||||
Future<MaintenanceDetectInstallResponseDto?> detectPriorInstall() async {
|
||||
final response = await detectPriorInstallWithHttpInfo();
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MaintenanceDetectInstallResponseDto',) as MaintenanceDetectInstallResponseDto;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Get maintenance mode status
|
||||
///
|
||||
/// Fetch information about the currently running maintenance action.
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
Future<Response> getMaintenanceStatusWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/maintenance/status';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Get maintenance mode status
|
||||
///
|
||||
/// Fetch information about the currently running maintenance action.
|
||||
Future<MaintenanceStatusResponseDto?> getMaintenanceStatus() async {
|
||||
final response = await getMaintenanceStatusWithHttpInfo();
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MaintenanceStatusResponseDto',) as MaintenanceStatusResponseDto;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Log into maintenance mode
|
||||
///
|
||||
/// Login with maintenance token or cookie to receive current information and perform further actions.
|
||||
|
||||
mobile/openapi/lib/api_client.dart (generated, 14 lines changed)
@@ -350,6 +350,12 @@ class ApiClient {
|
||||
return CropParameters.fromJson(value);
|
||||
case 'DatabaseBackupConfig':
|
||||
return DatabaseBackupConfig.fromJson(value);
|
||||
case 'DatabaseBackupDeleteDto':
|
||||
return DatabaseBackupDeleteDto.fromJson(value);
|
||||
case 'DatabaseBackupDto':
|
||||
return DatabaseBackupDto.fromJson(value);
|
||||
case 'DatabaseBackupListResponseDto':
|
||||
return DatabaseBackupListResponseDto.fromJson(value);
|
||||
case 'DownloadArchiveInfo':
|
||||
return DownloadArchiveInfo.fromJson(value);
|
||||
case 'DownloadInfoDto':
|
||||
@@ -408,8 +414,14 @@ class ApiClient {
|
||||
return MaintenanceActionTypeTransformer().decode(value);
|
||||
case 'MaintenanceAuthDto':
|
||||
return MaintenanceAuthDto.fromJson(value);
|
||||
case 'MaintenanceDetectInstallResponseDto':
|
||||
return MaintenanceDetectInstallResponseDto.fromJson(value);
|
||||
case 'MaintenanceDetectInstallStorageFolderDto':
|
||||
return MaintenanceDetectInstallStorageFolderDto.fromJson(value);
|
||||
case 'MaintenanceLoginDto':
|
||||
return MaintenanceLoginDto.fromJson(value);
|
||||
case 'MaintenanceStatusResponseDto':
|
||||
return MaintenanceStatusResponseDto.fromJson(value);
|
||||
case 'ManualJobName':
|
||||
return ManualJobNameTypeTransformer().decode(value);
|
||||
case 'MapMarkerResponseDto':
|
||||
@@ -640,6 +652,8 @@ class ApiClient {
|
||||
return StackUpdateDto.fromJson(value);
|
||||
case 'StatisticsSearchDto':
|
||||
return StatisticsSearchDto.fromJson(value);
|
||||
case 'StorageFolder':
|
||||
return StorageFolderTypeTransformer().decode(value);
|
||||
case 'SyncAckDeleteDto':
|
||||
return SyncAckDeleteDto.fromJson(value);
|
||||
case 'SyncAckDto':
|
||||
|
||||
mobile/openapi/lib/api_helper.dart (generated, 3 lines changed)
@@ -160,6 +160,9 @@ String parameterToString(dynamic value) {
|
||||
if (value is SourceType) {
|
||||
return SourceTypeTypeTransformer().encode(value).toString();
|
||||
}
|
||||
if (value is StorageFolder) {
|
||||
return StorageFolderTypeTransformer().encode(value).toString();
|
||||
}
|
||||
if (value is SyncEntityType) {
|
||||
return SyncEntityTypeTypeTransformer().encode(value).toString();
|
||||
}
|
||||
|
||||
mobile/openapi/lib/model/database_backup_delete_dto.dart (generated, new file, 101 lines)
@@ -0,0 +1,101 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class DatabaseBackupDeleteDto {
|
||||
/// Returns a new [DatabaseBackupDeleteDto] instance.
|
||||
DatabaseBackupDeleteDto({
|
||||
this.backups = const [],
|
||||
});
|
||||
|
||||
List<String> backups;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is DatabaseBackupDeleteDto &&
|
||||
_deepEquality.equals(other.backups, backups);
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(backups.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'DatabaseBackupDeleteDto[backups=$backups]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'backups'] = this.backups;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [DatabaseBackupDeleteDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static DatabaseBackupDeleteDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "DatabaseBackupDeleteDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return DatabaseBackupDeleteDto(
|
||||
backups: json[r'backups'] is Iterable
|
||||
? (json[r'backups'] as Iterable).cast<String>().toList(growable: false)
|
||||
: const [],
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<DatabaseBackupDeleteDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <DatabaseBackupDeleteDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = DatabaseBackupDeleteDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, DatabaseBackupDeleteDto> mapFromJson(dynamic json) {
|
||||
final map = <String, DatabaseBackupDeleteDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = DatabaseBackupDeleteDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of DatabaseBackupDeleteDto-objects as value to a dart map
|
||||
static Map<String, List<DatabaseBackupDeleteDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<DatabaseBackupDeleteDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = DatabaseBackupDeleteDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'backups',
|
||||
};
|
||||
}
|
||||
|
||||
mobile/openapi/lib/model/database_backup_dto.dart (generated, new file, 107 lines)
@@ -0,0 +1,107 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class DatabaseBackupDto {
|
||||
/// Returns a new [DatabaseBackupDto] instance.
|
||||
DatabaseBackupDto({
|
||||
required this.filename,
|
||||
required this.filesize,
|
||||
});
|
||||
|
||||
String filename;
|
||||
|
||||
num filesize;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is DatabaseBackupDto &&
|
||||
other.filename == filename &&
|
||||
other.filesize == filesize;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(filename.hashCode) +
|
||||
(filesize.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'DatabaseBackupDto[filename=$filename, filesize=$filesize]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'filename'] = this.filename;
|
||||
json[r'filesize'] = this.filesize;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [DatabaseBackupDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static DatabaseBackupDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "DatabaseBackupDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return DatabaseBackupDto(
|
||||
filename: mapValueOfType<String>(json, r'filename')!,
|
||||
filesize: num.parse('${json[r'filesize']}'),
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<DatabaseBackupDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <DatabaseBackupDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = DatabaseBackupDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, DatabaseBackupDto> mapFromJson(dynamic json) {
|
||||
final map = <String, DatabaseBackupDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = DatabaseBackupDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of DatabaseBackupDto-objects as value to a dart map
|
||||
static Map<String, List<DatabaseBackupDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<DatabaseBackupDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = DatabaseBackupDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'filename',
|
||||
'filesize',
|
||||
};
|
||||
}
|
||||
|
||||
mobile/openapi/lib/model/database_backup_list_response_dto.dart (generated, new file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class DatabaseBackupListResponseDto {
|
||||
/// Returns a new [DatabaseBackupListResponseDto] instance.
|
||||
DatabaseBackupListResponseDto({
|
||||
this.backups = const [],
|
||||
});
|
||||
|
||||
List<DatabaseBackupDto> backups;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is DatabaseBackupListResponseDto &&
|
||||
_deepEquality.equals(other.backups, backups);
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(backups.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'DatabaseBackupListResponseDto[backups=$backups]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'backups'] = this.backups;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [DatabaseBackupListResponseDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static DatabaseBackupListResponseDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "DatabaseBackupListResponseDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return DatabaseBackupListResponseDto(
|
||||
backups: DatabaseBackupDto.listFromJson(json[r'backups']),
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<DatabaseBackupListResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <DatabaseBackupListResponseDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = DatabaseBackupListResponseDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, DatabaseBackupListResponseDto> mapFromJson(dynamic json) {
|
||||
final map = <String, DatabaseBackupListResponseDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = DatabaseBackupListResponseDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of DatabaseBackupListResponseDto-objects as value to a dart map
|
||||
static Map<String, List<DatabaseBackupListResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<DatabaseBackupListResponseDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = DatabaseBackupListResponseDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'backups',
|
||||
};
|
||||
}
|
||||
|
||||
mobile/openapi/lib/model/maintenance_action.dart (generated, 6 lines changed)
@@ -25,11 +25,15 @@ class MaintenanceAction {
|
||||
|
||||
static const start = MaintenanceAction._(r'start');
|
||||
static const end = MaintenanceAction._(r'end');
|
||||
static const selectDatabaseRestore = MaintenanceAction._(r'select_database_restore');
|
||||
static const restoreDatabase = MaintenanceAction._(r'restore_database');
|
||||
|
||||
/// List of all possible values in this [enum][MaintenanceAction].
|
||||
static const values = <MaintenanceAction>[
|
||||
start,
|
||||
end,
|
||||
selectDatabaseRestore,
|
||||
restoreDatabase,
|
||||
];
|
||||
|
||||
static MaintenanceAction? fromJson(dynamic value) => MaintenanceActionTypeTransformer().decode(value);
|
||||
@@ -70,6 +74,8 @@ class MaintenanceActionTypeTransformer {
|
||||
switch (data) {
|
||||
case r'start': return MaintenanceAction.start;
|
||||
case r'end': return MaintenanceAction.end;
|
||||
case r'select_database_restore': return MaintenanceAction.selectDatabaseRestore;
|
||||
case r'restore_database': return MaintenanceAction.restoreDatabase;
|
||||
default:
|
||||
if (!allowNull) {
|
||||
throw ArgumentError('Unknown enum value to decode: $data');
|
||||
|
||||
mobile/openapi/lib/model/maintenance_detect_install_response_dto.dart (generated, new file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class MaintenanceDetectInstallResponseDto {
|
||||
/// Returns a new [MaintenanceDetectInstallResponseDto] instance.
|
||||
MaintenanceDetectInstallResponseDto({
|
||||
this.storage = const [],
|
||||
});
|
||||
|
||||
List<MaintenanceDetectInstallStorageFolderDto> storage;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is MaintenanceDetectInstallResponseDto &&
|
||||
_deepEquality.equals(other.storage, storage);
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(storage.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'MaintenanceDetectInstallResponseDto[storage=$storage]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'storage'] = this.storage;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [MaintenanceDetectInstallResponseDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static MaintenanceDetectInstallResponseDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "MaintenanceDetectInstallResponseDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return MaintenanceDetectInstallResponseDto(
|
||||
storage: MaintenanceDetectInstallStorageFolderDto.listFromJson(json[r'storage']),
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<MaintenanceDetectInstallResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <MaintenanceDetectInstallResponseDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = MaintenanceDetectInstallResponseDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, MaintenanceDetectInstallResponseDto> mapFromJson(dynamic json) {
|
||||
final map = <String, MaintenanceDetectInstallResponseDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = MaintenanceDetectInstallResponseDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of MaintenanceDetectInstallResponseDto-objects as value to a dart map
|
||||
static Map<String, List<MaintenanceDetectInstallResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<MaintenanceDetectInstallResponseDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = MaintenanceDetectInstallResponseDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'storage',
|
||||
};
|
||||
}
|
||||
|
||||
mobile/openapi/lib/model/maintenance_detect_install_storage_folder_dto.dart (generated, new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class MaintenanceDetectInstallStorageFolderDto {
|
||||
/// Returns a new [MaintenanceDetectInstallStorageFolderDto] instance.
|
||||
MaintenanceDetectInstallStorageFolderDto({
|
||||
required this.files,
|
||||
required this.folder,
|
||||
required this.readable,
|
||||
required this.writable,
|
||||
});
|
||||
|
||||
num files;
|
||||
|
||||
StorageFolder folder;
|
||||
|
||||
bool readable;
|
||||
|
||||
bool writable;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is MaintenanceDetectInstallStorageFolderDto &&
|
||||
other.files == files &&
|
||||
other.folder == folder &&
|
||||
other.readable == readable &&
|
||||
other.writable == writable;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(files.hashCode) +
|
||||
(folder.hashCode) +
|
||||
(readable.hashCode) +
|
||||
(writable.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'MaintenanceDetectInstallStorageFolderDto[files=$files, folder=$folder, readable=$readable, writable=$writable]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'files'] = this.files;
|
||||
json[r'folder'] = this.folder;
|
||||
json[r'readable'] = this.readable;
|
||||
json[r'writable'] = this.writable;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [MaintenanceDetectInstallStorageFolderDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static MaintenanceDetectInstallStorageFolderDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "MaintenanceDetectInstallStorageFolderDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return MaintenanceDetectInstallStorageFolderDto(
|
||||
files: num.parse('${json[r'files']}'),
|
||||
folder: StorageFolder.fromJson(json[r'folder'])!,
|
||||
readable: mapValueOfType<bool>(json, r'readable')!,
|
||||
writable: mapValueOfType<bool>(json, r'writable')!,
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<MaintenanceDetectInstallStorageFolderDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <MaintenanceDetectInstallStorageFolderDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = MaintenanceDetectInstallStorageFolderDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, MaintenanceDetectInstallStorageFolderDto> mapFromJson(dynamic json) {
|
||||
final map = <String, MaintenanceDetectInstallStorageFolderDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = MaintenanceDetectInstallStorageFolderDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of MaintenanceDetectInstallStorageFolderDto-objects as value to a dart map
|
||||
static Map<String, List<MaintenanceDetectInstallStorageFolderDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<MaintenanceDetectInstallStorageFolderDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = MaintenanceDetectInstallStorageFolderDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'files',
|
||||
'folder',
|
||||
'readable',
|
||||
'writable',
|
||||
};
|
||||
}
|
||||
|
||||
mobile/openapi/lib/model/maintenance_status_response_dto.dart (generated, new file, 158 lines)
@@ -0,0 +1,158 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class MaintenanceStatusResponseDto {
|
||||
/// Returns a new [MaintenanceStatusResponseDto] instance.
|
||||
MaintenanceStatusResponseDto({
|
||||
required this.action,
|
||||
required this.active,
|
||||
this.error,
|
||||
this.progress,
|
||||
this.task,
|
||||
});
|
||||
|
||||
MaintenanceAction action;
|
||||
|
||||
bool active;
|
||||
|
||||
///
|
||||
/// Please note: This property should have been non-nullable! Since the specification file
|
||||
/// does not include a default value (using the "default:" property), however, the generated
|
||||
/// source code must fall back to having a nullable type.
|
||||
/// Consider adding a "default:" property in the specification file to hide this note.
|
||||
///
|
||||
String? error;
|
||||
|
||||
///
|
||||
/// Please note: This property should have been non-nullable! Since the specification file
|
||||
/// does not include a default value (using the "default:" property), however, the generated
|
||||
/// source code must fall back to having a nullable type.
|
||||
/// Consider adding a "default:" property in the specification file to hide this note.
|
||||
///
|
||||
num? progress;
|
||||
|
||||
///
|
||||
/// Please note: This property should have been non-nullable! Since the specification file
|
||||
/// does not include a default value (using the "default:" property), however, the generated
|
||||
/// source code must fall back to having a nullable type.
|
||||
/// Consider adding a "default:" property in the specification file to hide this note.
|
||||
///
|
||||
String? task;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is MaintenanceStatusResponseDto &&
|
||||
other.action == action &&
|
||||
other.active == active &&
|
||||
other.error == error &&
|
||||
other.progress == progress &&
|
||||
other.task == task;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(action.hashCode) +
|
||||
(active.hashCode) +
|
||||
(error == null ? 0 : error!.hashCode) +
|
||||
(progress == null ? 0 : progress!.hashCode) +
|
||||
(task == null ? 0 : task!.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'MaintenanceStatusResponseDto[action=$action, active=$active, error=$error, progress=$progress, task=$task]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'action'] = this.action;
|
||||
json[r'active'] = this.active;
|
||||
if (this.error != null) {
|
||||
json[r'error'] = this.error;
|
||||
} else {
|
||||
// json[r'error'] = null;
|
||||
}
|
||||
if (this.progress != null) {
|
||||
json[r'progress'] = this.progress;
|
||||
} else {
|
||||
// json[r'progress'] = null;
|
||||
}
|
||||
if (this.task != null) {
|
||||
json[r'task'] = this.task;
|
||||
} else {
|
||||
// json[r'task'] = null;
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [MaintenanceStatusResponseDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static MaintenanceStatusResponseDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "MaintenanceStatusResponseDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return MaintenanceStatusResponseDto(
|
||||
action: MaintenanceAction.fromJson(json[r'action'])!,
|
||||
active: mapValueOfType<bool>(json, r'active')!,
|
||||
error: mapValueOfType<String>(json, r'error'),
|
||||
progress: num.parse('${json[r'progress']}'),
|
||||
task: mapValueOfType<String>(json, r'task'),
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<MaintenanceStatusResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <MaintenanceStatusResponseDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = MaintenanceStatusResponseDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, MaintenanceStatusResponseDto> mapFromJson(dynamic json) {
|
||||
final map = <String, MaintenanceStatusResponseDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = MaintenanceStatusResponseDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of MaintenanceStatusResponseDto-objects as value to a dart map
|
||||
static Map<String, List<MaintenanceStatusResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<MaintenanceStatusResponseDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = MaintenanceStatusResponseDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'action',
|
||||
'active',
|
||||
};
|
||||
}
|
||||
|
||||
12 mobile/openapi/lib/model/permission.dart (generated)
@@ -62,6 +62,10 @@ class Permission {
|
||||
static const authPeriodChangePassword = Permission._(r'auth.changePassword');
|
||||
static const authDevicePeriodDelete = Permission._(r'authDevice.delete');
|
||||
static const archivePeriodRead = Permission._(r'archive.read');
|
||||
static const backupPeriodList = Permission._(r'backup.list');
|
||||
static const backupPeriodDownload = Permission._(r'backup.download');
|
||||
static const backupPeriodUpload = Permission._(r'backup.upload');
|
||||
static const backupPeriodDelete = Permission._(r'backup.delete');
|
||||
static const duplicatePeriodRead = Permission._(r'duplicate.read');
|
||||
static const duplicatePeriodDelete = Permission._(r'duplicate.delete');
|
||||
static const facePeriodCreate = Permission._(r'face.create');
|
||||
@@ -214,6 +218,10 @@ class Permission {
|
||||
authPeriodChangePassword,
|
||||
authDevicePeriodDelete,
|
||||
archivePeriodRead,
|
||||
backupPeriodList,
|
||||
backupPeriodDownload,
|
||||
backupPeriodUpload,
|
||||
backupPeriodDelete,
|
||||
duplicatePeriodRead,
|
||||
duplicatePeriodDelete,
|
||||
facePeriodCreate,
|
||||
@@ -401,6 +409,10 @@ class PermissionTypeTransformer {
|
||||
case r'auth.changePassword': return Permission.authPeriodChangePassword;
|
||||
case r'authDevice.delete': return Permission.authDevicePeriodDelete;
|
||||
case r'archive.read': return Permission.archivePeriodRead;
|
||||
case r'backup.list': return Permission.backupPeriodList;
|
||||
case r'backup.download': return Permission.backupPeriodDownload;
|
||||
case r'backup.upload': return Permission.backupPeriodUpload;
|
||||
case r'backup.delete': return Permission.backupPeriodDelete;
|
||||
case r'duplicate.read': return Permission.duplicatePeriodRead;
|
||||
case r'duplicate.delete': return Permission.duplicatePeriodDelete;
|
||||
case r'face.create': return Permission.facePeriodCreate;
|
||||
|
||||
@@ -14,25 +14,41 @@ class SetMaintenanceModeDto {
|
||||
/// Returns a new [SetMaintenanceModeDto] instance.
|
||||
SetMaintenanceModeDto({
|
||||
required this.action,
|
||||
this.restoreBackupFilename,
|
||||
});
|
||||
|
||||
MaintenanceAction action;
|
||||
|
||||
///
|
||||
/// Please note: This property should have been non-nullable! Since the specification file
|
||||
/// does not include a default value (using the "default:" property), however, the generated
|
||||
/// source code must fall back to having a nullable type.
|
||||
/// Consider adding a "default:" property in the specification file to hide this note.
|
||||
///
|
||||
String? restoreBackupFilename;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is SetMaintenanceModeDto &&
|
||||
other.action == action;
|
||||
other.action == action &&
|
||||
other.restoreBackupFilename == restoreBackupFilename;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(action.hashCode);
|
||||
(action.hashCode) +
|
||||
(restoreBackupFilename == null ? 0 : restoreBackupFilename!.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'SetMaintenanceModeDto[action=$action]';
|
||||
String toString() => 'SetMaintenanceModeDto[action=$action, restoreBackupFilename=$restoreBackupFilename]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'action'] = this.action;
|
||||
if (this.restoreBackupFilename != null) {
|
||||
json[r'restoreBackupFilename'] = this.restoreBackupFilename;
|
||||
} else {
|
||||
// json[r'restoreBackupFilename'] = null;
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
||||
@@ -46,6 +62,7 @@ class SetMaintenanceModeDto {
|
||||
|
||||
return SetMaintenanceModeDto(
|
||||
action: MaintenanceAction.fromJson(json[r'action'])!,
|
||||
restoreBackupFilename: mapValueOfType<String>(json, r'restoreBackupFilename'),
|
||||
);
|
||||
}
|
||||
return null;
|
||||
|
||||
97 mobile/openapi/lib/model/storage_folder.dart (generated, new file)
@@ -0,0 +1,97 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
|
||||
class StorageFolder {
|
||||
/// Instantiate a new enum with the provided [value].
|
||||
const StorageFolder._(this.value);
|
||||
|
||||
/// The underlying value of this enum member.
|
||||
final String value;
|
||||
|
||||
@override
|
||||
String toString() => value;
|
||||
|
||||
String toJson() => value;
|
||||
|
||||
static const encodedVideo = StorageFolder._(r'encoded-video');
|
||||
static const library_ = StorageFolder._(r'library');
|
||||
static const upload = StorageFolder._(r'upload');
|
||||
static const profile = StorageFolder._(r'profile');
|
||||
static const thumbs = StorageFolder._(r'thumbs');
|
||||
static const backups = StorageFolder._(r'backups');
|
||||
|
||||
/// List of all possible values in this [enum][StorageFolder].
|
||||
static const values = <StorageFolder>[
|
||||
encodedVideo,
|
||||
library_,
|
||||
upload,
|
||||
profile,
|
||||
thumbs,
|
||||
backups,
|
||||
];
|
||||
|
||||
static StorageFolder? fromJson(dynamic value) => StorageFolderTypeTransformer().decode(value);
|
||||
|
||||
static List<StorageFolder> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <StorageFolder>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = StorageFolder.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
}
|
||||
|
||||
/// Transformation class that can [encode] an instance of [StorageFolder] to String,
|
||||
/// and [decode] dynamic data back to [StorageFolder].
|
||||
class StorageFolderTypeTransformer {
|
||||
factory StorageFolderTypeTransformer() => _instance ??= const StorageFolderTypeTransformer._();
|
||||
|
||||
const StorageFolderTypeTransformer._();
|
||||
|
||||
String encode(StorageFolder data) => data.value;
|
||||
|
||||
/// Decodes a [dynamic value][data] to a StorageFolder.
|
||||
///
|
||||
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
|
||||
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
|
||||
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
|
||||
///
|
||||
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
|
||||
/// and users are still using an old app with the old code.
|
||||
StorageFolder? decode(dynamic data, {bool allowNull = true}) {
|
||||
if (data != null) {
|
||||
switch (data) {
|
||||
case r'encoded-video': return StorageFolder.encodedVideo;
|
||||
case r'library': return StorageFolder.library_;
|
||||
case r'upload': return StorageFolder.upload;
|
||||
case r'profile': return StorageFolder.profile;
|
||||
case r'thumbs': return StorageFolder.thumbs;
|
||||
case r'backups': return StorageFolder.backups;
|
||||
default:
|
||||
if (!allowNull) {
|
||||
throw ArgumentError('Unknown enum value to decode: $data');
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Singleton [StorageFolderTypeTransformer] instance.
|
||||
static StorageFolderTypeTransformer? _instance;
|
||||
}
|
||||
|
||||
@@ -322,6 +322,237 @@
|
||||
"x-immich-state": "Stable"
|
||||
}
|
||||
},
|
||||
"/admin/database-backups": {
|
||||
"delete": {
|
||||
"description": "Delete a backup by its filename",
|
||||
"operationId": "deleteDatabaseBackup",
|
||||
"parameters": [],
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/DatabaseBackupDeleteDto"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": true
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearer": []
|
||||
},
|
||||
{
|
||||
"cookie": []
|
||||
},
|
||||
{
|
||||
"api_key": []
|
||||
}
|
||||
],
|
||||
"summary": "Delete database backup",
|
||||
"tags": [
|
||||
"Database Backups (admin)"
|
||||
],
|
||||
"x-immich-admin-only": true,
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-permission": "backup.delete",
|
||||
"x-immich-state": "Alpha"
|
||||
},
|
||||
"get": {
|
||||
"description": "Get the list of the successful and failed backups",
|
||||
"operationId": "listDatabaseBackups",
|
||||
"parameters": [],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/DatabaseBackupListResponseDto"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearer": []
|
||||
},
|
||||
{
|
||||
"cookie": []
|
||||
},
|
||||
{
|
||||
"api_key": []
|
||||
}
|
||||
],
|
||||
"summary": "List database backups",
|
||||
"tags": [
|
||||
"Database Backups (admin)"
|
||||
],
|
||||
"x-immich-admin-only": true,
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-permission": "maintenance",
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/database-backups/start-restore": {
|
||||
"post": {
|
||||
"description": "Put Immich into maintenance mode to restore a backup (Immich must not be configured)",
|
||||
"operationId": "startDatabaseRestoreFlow",
|
||||
"parameters": [],
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"summary": "Start database backup restore flow",
|
||||
"tags": [
|
||||
"Database Backups (admin)"
|
||||
],
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/database-backups/upload": {
|
||||
"post": {
|
||||
"description": "Uploads .sql/.sql.gz file to restore backup from",
|
||||
"operationId": "uploadDatabaseBackup",
|
||||
"parameters": [],
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"multipart/form-data": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/DatabaseBackupUploadDto"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "Backup Upload",
|
||||
"required": true
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearer": []
|
||||
},
|
||||
{
|
||||
"cookie": []
|
||||
},
|
||||
{
|
||||
"api_key": []
|
||||
}
|
||||
],
|
||||
"summary": "Upload database backup",
|
||||
"tags": [
|
||||
"Database Backups (admin)"
|
||||
],
|
||||
"x-immich-admin-only": true,
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-permission": "backup.upload",
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/database-backups/{filename}": {
|
||||
"get": {
|
||||
"description": "Downloads the database backup file",
|
||||
"operationId": "downloadDatabaseBackup",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "filename",
|
||||
"required": true,
|
||||
"in": "path",
|
||||
"schema": {
|
||||
"format": "string",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/octet-stream": {
|
||||
"schema": {
|
||||
"format": "binary",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearer": []
|
||||
},
|
||||
{
|
||||
"cookie": []
|
||||
},
|
||||
{
|
||||
"api_key": []
|
||||
}
|
||||
],
|
||||
"summary": "Download database backup",
|
||||
"tags": [
|
||||
"Database Backups (admin)"
|
||||
],
|
||||
"x-immich-admin-only": true,
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-permission": "backup.download",
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/maintenance": {
|
||||
"post": {
|
||||
"description": "Put Immich into or take it out of maintenance mode",
|
||||
@@ -372,6 +603,53 @@
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/maintenance/detect-install": {
|
||||
"get": {
|
||||
"description": "Collect integrity checks and other heuristics about local data.",
|
||||
"operationId": "detectPriorInstall",
|
||||
"parameters": [],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/MaintenanceDetectInstallResponseDto"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearer": []
|
||||
},
|
||||
{
|
||||
"cookie": []
|
||||
},
|
||||
{
|
||||
"api_key": []
|
||||
}
|
||||
],
|
||||
"summary": "Detect existing install",
|
||||
"tags": [
|
||||
"Maintenance (admin)"
|
||||
],
|
||||
"x-immich-admin-only": true,
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-permission": "maintenance",
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/maintenance/login": {
|
||||
"post": {
|
||||
"description": "Login with maintenance token or cookie to receive current information and perform further actions.",
|
||||
@@ -416,6 +694,40 @@
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/maintenance/status": {
|
||||
"get": {
|
||||
"description": "Fetch information about the currently running maintenance action.",
|
||||
"operationId": "getMaintenanceStatus",
|
||||
"parameters": [],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/MaintenanceStatusResponseDto"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"summary": "Get maintenance mode status",
|
||||
"tags": [
|
||||
"Maintenance (admin)"
|
||||
],
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.5.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/notifications": {
|
||||
"post": {
|
||||
"description": "Create a new notification for a specific user.",
|
||||
@@ -14661,6 +14973,10 @@
|
||||
"name": "Authentication (admin)",
|
||||
"description": "Administrative endpoints related to authentication."
|
||||
},
|
||||
{
|
||||
"name": "Database Backups (admin)",
|
||||
"description": "Manage backups of the Immich database."
|
||||
},
|
||||
{
|
||||
"name": "Deprecated",
|
||||
"description": "Deprecated endpoints that are planned for removal in the next major release."
|
||||
@@ -16849,6 +17165,58 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"DatabaseBackupDeleteDto": {
|
||||
"properties": {
|
||||
"backups": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"backups"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"DatabaseBackupDto": {
|
||||
"properties": {
|
||||
"filename": {
|
||||
"type": "string"
|
||||
},
|
||||
"filesize": {
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"filename",
|
||||
"filesize"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"DatabaseBackupListResponseDto": {
|
||||
"properties": {
|
||||
"backups": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/DatabaseBackupDto"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"backups"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"DatabaseBackupUploadDto": {
|
||||
"properties": {
|
||||
"file": {
|
||||
"format": "binary",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"DownloadArchiveInfo": {
|
||||
"properties": {
|
||||
"assetIds": {
|
||||
@@ -17516,7 +17884,9 @@
|
||||
"MaintenanceAction": {
|
||||
"enum": [
|
||||
"start",
|
||||
"end"
|
||||
"end",
|
||||
"select_database_restore",
|
||||
"restore_database"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
@@ -17531,6 +17901,47 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"MaintenanceDetectInstallResponseDto": {
|
||||
"properties": {
|
||||
"storage": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/MaintenanceDetectInstallStorageFolderDto"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"storage"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"MaintenanceDetectInstallStorageFolderDto": {
|
||||
"properties": {
|
||||
"files": {
|
||||
"type": "number"
|
||||
},
|
||||
"folder": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/StorageFolder"
|
||||
}
|
||||
]
|
||||
},
|
||||
"readable": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"writable": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"files",
|
||||
"folder",
|
||||
"readable",
|
||||
"writable"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"MaintenanceLoginDto": {
|
||||
"properties": {
|
||||
"token": {
|
||||
@@ -17539,6 +17950,34 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"MaintenanceStatusResponseDto": {
|
||||
"properties": {
|
||||
"action": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/MaintenanceAction"
|
||||
}
|
||||
]
|
||||
},
|
||||
"active": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"error": {
|
||||
"type": "string"
|
||||
},
|
||||
"progress": {
|
||||
"type": "number"
|
||||
},
|
||||
"task": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"action",
|
||||
"active"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ManualJobName": {
|
||||
"enum": [
|
||||
"person-cleanup",
|
||||
@@ -18507,6 +18946,10 @@
|
||||
"auth.changePassword",
|
||||
"authDevice.delete",
|
||||
"archive.read",
|
||||
"backup.list",
|
||||
"backup.download",
|
||||
"backup.upload",
|
||||
"backup.delete",
|
||||
"duplicate.read",
|
||||
"duplicate.delete",
|
||||
"face.create",
|
||||
@@ -20285,6 +20728,9 @@
|
||||
"$ref": "#/components/schemas/MaintenanceAction"
|
||||
}
|
||||
]
|
||||
},
|
||||
"restoreBackupFilename": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
@@ -20857,6 +21303,17 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"StorageFolder": {
|
||||
"enum": [
|
||||
"encoded-video",
|
||||
"library",
|
||||
"upload",
|
||||
"profile",
|
||||
"thumbs",
|
||||
"backups"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"SyncAckDeleteDto": {
|
||||
"properties": {
|
||||
"types": {
|
||||
|
||||
@@ -40,8 +40,31 @@ export type ActivityStatisticsResponseDto = {
|
||||
comments: number;
|
||||
likes: number;
|
||||
};
|
||||
export type DatabaseBackupDeleteDto = {
|
||||
backups: string[];
|
||||
};
|
||||
export type DatabaseBackupDto = {
|
||||
filename: string;
|
||||
filesize: number;
|
||||
};
|
||||
export type DatabaseBackupListResponseDto = {
|
||||
backups: DatabaseBackupDto[];
|
||||
};
|
||||
export type DatabaseBackupUploadDto = {
|
||||
file?: Blob;
|
||||
};
|
||||
export type SetMaintenanceModeDto = {
|
||||
action: MaintenanceAction;
|
||||
restoreBackupFilename?: string;
|
||||
};
|
||||
export type MaintenanceDetectInstallStorageFolderDto = {
|
||||
files: number;
|
||||
folder: StorageFolder;
|
||||
readable: boolean;
|
||||
writable: boolean;
|
||||
};
|
||||
export type MaintenanceDetectInstallResponseDto = {
|
||||
storage: MaintenanceDetectInstallStorageFolderDto[];
|
||||
};
|
||||
export type MaintenanceLoginDto = {
|
||||
token?: string;
|
||||
@@ -49,6 +72,13 @@ export type MaintenanceLoginDto = {
|
||||
export type MaintenanceAuthDto = {
|
||||
username: string;
|
||||
};
|
||||
export type MaintenanceStatusResponseDto = {
|
||||
action: MaintenanceAction;
|
||||
active: boolean;
|
||||
error?: string;
|
||||
progress?: number;
|
||||
task?: string;
|
||||
};
|
||||
export type NotificationCreateDto = {
|
||||
data?: object;
|
||||
description?: string | null;
|
||||
@@ -1920,6 +1950,63 @@ export function unlinkAllOAuthAccountsAdmin(opts?: Oazapfts.RequestOpts) {
|
||||
method: "POST"
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Delete database backup
|
||||
*/
|
||||
export function deleteDatabaseBackup({ databaseBackupDeleteDto }: {
|
||||
databaseBackupDeleteDto: DatabaseBackupDeleteDto;
|
||||
}, opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchText("/admin/database-backups", oazapfts.json({
|
||||
...opts,
|
||||
method: "DELETE",
|
||||
body: databaseBackupDeleteDto
|
||||
})));
|
||||
}
|
||||
/**
|
||||
* List database backups
|
||||
*/
|
||||
export function listDatabaseBackups(opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchJson<{
|
||||
status: 200;
|
||||
data: DatabaseBackupListResponseDto;
|
||||
}>("/admin/database-backups", {
|
||||
...opts
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Start database backup restore flow
|
||||
*/
|
||||
export function startDatabaseRestoreFlow(opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchText("/admin/database-backups/start-restore", {
|
||||
...opts,
|
||||
method: "POST"
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Upload database backup
|
||||
*/
|
||||
export function uploadDatabaseBackup({ databaseBackupUploadDto }: {
|
||||
databaseBackupUploadDto: DatabaseBackupUploadDto;
|
||||
}, opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchText("/admin/database-backups/upload", oazapfts.multipart({
|
||||
...opts,
|
||||
method: "POST",
|
||||
body: databaseBackupUploadDto
|
||||
})));
|
||||
}
|
||||
/**
|
||||
* Download database backup
|
||||
*/
|
||||
export function downloadDatabaseBackup({ filename }: {
|
||||
filename: string;
|
||||
}, opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchBlob<{
|
||||
status: 200;
|
||||
data: Blob;
|
||||
}>(`/admin/database-backups/${encodeURIComponent(filename)}`, {
|
||||
...opts
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Set maintenance mode
|
||||
*/
|
||||
@@ -1932,6 +2019,17 @@ export function setMaintenanceMode({ setMaintenanceModeDto }: {
|
||||
body: setMaintenanceModeDto
|
||||
})));
|
||||
}
|
||||
/**
|
||||
* Detect existing install
|
||||
*/
|
||||
export function detectPriorInstall(opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchJson<{
|
||||
status: 200;
|
||||
data: MaintenanceDetectInstallResponseDto;
|
||||
}>("/admin/maintenance/detect-install", {
|
||||
...opts
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Log into maintenance mode
|
||||
*/
|
||||
@@ -1947,6 +2045,17 @@ export function maintenanceLogin({ maintenanceLoginDto }: {
|
||||
body: maintenanceLoginDto
|
||||
})));
|
||||
}
|
||||
/**
|
||||
* Get maintenance mode status
|
||||
*/
|
||||
export function getMaintenanceStatus(opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchJson<{
|
||||
status: 200;
|
||||
data: MaintenanceStatusResponseDto;
|
||||
}>("/admin/maintenance/status", {
|
||||
...opts
|
||||
}));
|
||||
}
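For orientation, here is a minimal sketch of how the new generated client functions above could be chained for a database restore. It assumes the Oazapfts client is already configured elsewhere (base URL, credentials) and that the restore is driven by setMaintenanceMode with the new restore_database action; the ordering, the "last entry is newest" assumption, and the 1-second polling interval are illustrative, not taken from this diff.
// Hypothetical admin restore flow built from the generated functions above (sketch only).
async function restoreLatestBackup() {
  // Enter the restore flow; the server sets a maintenance-token cookie in the response.
  await startDatabaseRestoreFlow();

  // Pick a backup from the new list endpoint (assumes the last entry is the most recent).
  const { backups } = await listDatabaseBackups();
  const latest = backups.at(-1);
  if (!latest) {
    throw new Error('No database backups available');
  }

  // Ask the maintenance worker to restore the selected file.
  await setMaintenanceMode({
    setMaintenanceModeDto: {
      action: MaintenanceAction.RestoreDatabase,
      restoreBackupFilename: latest.filename,
    },
  });

  // Poll the new status endpoint until the action settles.
  let status = await getMaintenanceStatus();
  while (status.active && !status.error) {
    await new Promise((resolve) => setTimeout(resolve, 1000));
    status = await getMaintenanceStatus();
  }
  return status;
}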
|
||||
/**
|
||||
* Create a notification
|
||||
*/
|
||||
@@ -5297,7 +5406,17 @@ export enum UserAvatarColor {
|
||||
}
|
||||
export enum MaintenanceAction {
|
||||
Start = "start",
|
||||
End = "end"
|
||||
End = "end",
|
||||
SelectDatabaseRestore = "select_database_restore",
|
||||
RestoreDatabase = "restore_database"
|
||||
}
|
||||
export enum StorageFolder {
|
||||
EncodedVideo = "encoded-video",
|
||||
Library = "library",
|
||||
Upload = "upload",
|
||||
Profile = "profile",
|
||||
Thumbs = "thumbs",
|
||||
Backups = "backups"
|
||||
}
|
||||
export enum NotificationLevel {
|
||||
Success = "success",
|
||||
@@ -5395,6 +5514,10 @@ export enum Permission {
|
||||
AuthChangePassword = "auth.changePassword",
|
||||
AuthDeviceDelete = "authDevice.delete",
|
||||
ArchiveRead = "archive.read",
|
||||
BackupList = "backup.list",
|
||||
BackupDownload = "backup.download",
|
||||
BackupUpload = "backup.upload",
|
||||
BackupDelete = "backup.delete",
|
||||
DuplicateRead = "duplicate.read",
|
||||
DuplicateDelete = "duplicate.delete",
|
||||
FaceCreate = "face.create",
|
||||
|
||||
@@ -10,6 +10,7 @@ import { IWorker } from 'src/constants';
|
||||
import { controllers } from 'src/controllers';
|
||||
import { ImmichWorker } from 'src/enum';
|
||||
import { MaintenanceAuthGuard } from 'src/maintenance/maintenance-auth.guard';
|
||||
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
|
||||
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
|
||||
import { MaintenanceWorkerController } from 'src/maintenance/maintenance-worker.controller';
|
||||
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
|
||||
@@ -21,8 +22,11 @@ import { LoggingInterceptor } from 'src/middleware/logging.interceptor';
|
||||
import { repositories } from 'src/repositories';
|
||||
import { AppRepository } from 'src/repositories/app.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { teardownTelemetry, TelemetryRepository } from 'src/repositories/telemetry.repository';
|
||||
import { WebsocketRepository } from 'src/repositories/websocket.repository';
|
||||
@@ -103,8 +107,12 @@ export class ApiModule extends BaseModule {}
|
||||
providers: [
|
||||
ConfigRepository,
|
||||
LoggingRepository,
|
||||
StorageRepository,
|
||||
ProcessRepository,
|
||||
DatabaseRepository,
|
||||
SystemMetadataRepository,
|
||||
AppRepository,
|
||||
MaintenanceHealthRepository,
|
||||
MaintenanceWebsocketRepository,
|
||||
MaintenanceWorkerService,
|
||||
...commonMiddleware,
|
||||
@@ -116,9 +124,14 @@ export class MaintenanceModule {
|
||||
constructor(
|
||||
@Inject(IWorker) private worker: ImmichWorker,
|
||||
logger: LoggingRepository,
|
||||
private maintenanceWorkerService: MaintenanceWorkerService,
|
||||
) {
|
||||
logger.setAppName(this.worker);
|
||||
}
|
||||
|
||||
async onModuleInit() {
|
||||
await this.maintenanceWorkerService.init();
|
||||
}
|
||||
}
|
||||
|
||||
@Module({
|
||||
|
||||
@@ -141,6 +141,7 @@ export const endpointTags: Record<ApiTag, string> = {
|
||||
[ApiTag.Assets]: 'An asset is an image or video that has been uploaded to Immich.',
|
||||
[ApiTag.Authentication]: 'Endpoints related to user authentication, including OAuth.',
|
||||
[ApiTag.AuthenticationAdmin]: 'Administrative endpoints related to authentication.',
|
||||
[ApiTag.DatabaseBackups]: 'Manage backups of the Immich database.',
|
||||
[ApiTag.Deprecated]: 'Deprecated endpoints that are planned for removal in the next major release.',
|
||||
[ApiTag.Download]: 'Endpoints for downloading assets or collections of assets.',
|
||||
[ApiTag.Duplicates]: 'Endpoints for managing and identifying duplicate assets.',
|
||||
|
||||
101 server/src/controllers/database-backup.controller.ts (new file)
@@ -0,0 +1,101 @@
|
||||
import { Body, Controller, Delete, Get, Next, Param, Post, Res, UploadedFile, UseInterceptors } from '@nestjs/common';
|
||||
import { FileInterceptor } from '@nestjs/platform-express';
|
||||
import { ApiBody, ApiConsumes, ApiTags } from '@nestjs/swagger';
|
||||
import { NextFunction, Response } from 'express';
|
||||
import { Endpoint, HistoryBuilder } from 'src/decorators';
|
||||
import {
|
||||
DatabaseBackupDeleteDto,
|
||||
DatabaseBackupListResponseDto,
|
||||
DatabaseBackupUploadDto,
|
||||
} from 'src/dtos/database-backup.dto';
|
||||
import { ApiTag, ImmichCookie, Permission } from 'src/enum';
|
||||
import { Authenticated, FileResponse, GetLoginDetails } from 'src/middleware/auth.guard';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { LoginDetails } from 'src/services/auth.service';
|
||||
import { DatabaseBackupService } from 'src/services/database-backup.service';
|
||||
import { MaintenanceService } from 'src/services/maintenance.service';
|
||||
import { sendFile } from 'src/utils/file';
|
||||
import { respondWithCookie } from 'src/utils/response';
|
||||
import { FilenameParamDto } from 'src/validation';
|
||||
|
||||
@ApiTags(ApiTag.DatabaseBackups)
|
||||
@Controller('admin/database-backups')
|
||||
export class DatabaseBackupController {
|
||||
constructor(
|
||||
private logger: LoggingRepository,
|
||||
private service: DatabaseBackupService,
|
||||
private maintenanceService: MaintenanceService,
|
||||
) {}
|
||||
|
||||
@Get()
|
||||
@Endpoint({
|
||||
summary: 'List database backups',
|
||||
description: 'Get the list of the successful and failed backups',
|
||||
history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
|
||||
})
|
||||
@Authenticated({ permission: Permission.Maintenance, admin: true })
|
||||
listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
|
||||
return this.service.listBackups();
|
||||
}
|
||||
|
||||
@Get(':filename')
|
||||
@FileResponse()
|
||||
@Endpoint({
|
||||
summary: 'Download database backup',
|
||||
description: 'Downloads the database backup file',
|
||||
history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
|
||||
})
|
||||
@Authenticated({ permission: Permission.BackupDownload, admin: true })
|
||||
async downloadDatabaseBackup(
|
||||
@Param() { filename }: FilenameParamDto,
|
||||
@Res() res: Response,
|
||||
@Next() next: NextFunction,
|
||||
): Promise<void> {
|
||||
await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
|
||||
}
|
||||
|
||||
@Delete()
|
||||
@Endpoint({
|
||||
summary: 'Delete database backup',
|
||||
description: 'Delete a backup by its filename',
|
||||
history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
|
||||
})
|
||||
@Authenticated({ permission: Permission.BackupDelete, admin: true })
|
||||
async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
|
||||
return this.service.deleteBackup(dto.backups);
|
||||
}
|
||||
|
||||
@Post('start-restore')
|
||||
@Endpoint({
|
||||
summary: 'Start database backup restore flow',
|
||||
description: 'Put Immich into maintenance mode to restore a backup (Immich must not be configured)',
|
||||
history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
|
||||
})
|
||||
async startDatabaseRestoreFlow(
|
||||
@GetLoginDetails() loginDetails: LoginDetails,
|
||||
@Res({ passthrough: true }) res: Response,
|
||||
): Promise<void> {
|
||||
const { jwt } = await this.maintenanceService.startRestoreFlow();
|
||||
return respondWithCookie(res, undefined, {
|
||||
isSecure: loginDetails.isSecure,
|
||||
values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],
|
||||
});
|
||||
}
|
||||
|
||||
@Post('upload')
|
||||
@Authenticated({ permission: Permission.BackupUpload, admin: true })
|
||||
@ApiConsumes('multipart/form-data')
|
||||
@ApiBody({ description: 'Backup Upload', type: DatabaseBackupUploadDto })
|
||||
@Endpoint({
|
||||
summary: 'Upload database backup',
|
||||
description: 'Uploads .sql/.sql.gz file to restore backup from',
|
||||
history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
|
||||
})
|
||||
@UseInterceptors(FileInterceptor('file'))
|
||||
uploadDatabaseBackup(
|
||||
@UploadedFile()
|
||||
file: Express.Multer.File,
|
||||
): Promise<void> {
|
||||
return this.service.uploadBackup(file);
|
||||
}
|
||||
}
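As a usage note, the upload endpoint consumes multipart/form-data with a single field named file (matching FileInterceptor('file') and DatabaseBackupUploadDto), while delete takes a list of filenames. Below is a rough sketch that exercises upload, list, and delete through the generated TypeScript client shown earlier in this diff; the clean-up policy (keeping only the newest entry) is purely an assumption for illustration.
// Sketch only: upload a new backup, then prune older ones via the generated client.
async function replaceBackups(file: Blob) {
  // multipart/form-data upload; the form field is named "file".
  await uploadDatabaseBackup({ databaseBackupUploadDto: { file } });

  const { backups } = await listDatabaseBackups();

  // Assumed ordering: everything except the last entry is treated as stale.
  const stale = backups.slice(0, -1).map((backup) => backup.filename);
  if (stale.length > 0) {
    await deleteDatabaseBackup({ databaseBackupDeleteDto: { backups: stale } });
  }
}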
|
||||
@@ -6,6 +6,7 @@ import { AssetMediaController } from 'src/controllers/asset-media.controller';
|
||||
import { AssetController } from 'src/controllers/asset.controller';
|
||||
import { AuthAdminController } from 'src/controllers/auth-admin.controller';
|
||||
import { AuthController } from 'src/controllers/auth.controller';
|
||||
import { DatabaseBackupController } from 'src/controllers/database-backup.controller';
|
||||
import { DownloadController } from 'src/controllers/download.controller';
|
||||
import { DuplicateController } from 'src/controllers/duplicate.controller';
|
||||
import { FaceController } from 'src/controllers/face.controller';
|
||||
@@ -46,6 +47,7 @@ export const controllers = [
|
||||
AssetMediaController,
|
||||
AuthController,
|
||||
AuthAdminController,
|
||||
DatabaseBackupController,
|
||||
DownloadController,
|
||||
DuplicateController,
|
||||
FaceController,
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
import { BadRequestException, Body, Controller, Post, Res } from '@nestjs/common';
|
||||
import { BadRequestException, Body, Controller, Get, Post, Res } from '@nestjs/common';
|
||||
import { ApiTags } from '@nestjs/swagger';
|
||||
import { Response } from 'express';
|
||||
import { Endpoint, HistoryBuilder } from 'src/decorators';
|
||||
import { AuthDto } from 'src/dtos/auth.dto';
|
||||
import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
|
||||
import {
|
||||
MaintenanceAuthDto,
|
||||
MaintenanceDetectInstallResponseDto,
|
||||
MaintenanceLoginDto,
|
||||
MaintenanceStatusResponseDto,
|
||||
SetMaintenanceModeDto,
|
||||
} from 'src/dtos/maintenance.dto';
|
||||
import { ApiTag, ImmichCookie, MaintenanceAction, Permission } from 'src/enum';
|
||||
import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
|
||||
import { LoginDetails } from 'src/services/auth.service';
|
||||
@@ -15,6 +21,27 @@ import { respondWithCookie } from 'src/utils/response';
|
||||
export class MaintenanceController {
|
||||
constructor(private service: MaintenanceService) {}
|
||||
|
||||
@Get('status')
|
||||
@Endpoint({
|
||||
summary: 'Get maintenance mode status',
|
||||
description: 'Fetch information about the currently running maintenance action.',
|
||||
history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
|
||||
})
|
||||
getMaintenanceStatus(): MaintenanceStatusResponseDto {
|
||||
return this.service.getMaintenanceStatus();
|
||||
}
|
||||
|
||||
@Get('detect-install')
|
||||
@Endpoint({
|
||||
summary: 'Detect existing install',
|
||||
description: 'Collect integrity checks and other heuristics about local data.',
|
||||
history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
|
||||
})
|
||||
@Authenticated({ permission: Permission.Maintenance, admin: true })
|
||||
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
|
||||
return this.service.detectPriorInstall();
|
||||
}
|
||||
|
||||
@Post('login')
|
||||
@Endpoint({
|
||||
summary: 'Log into maintenance mode',
|
||||
@@ -38,8 +65,8 @@ export class MaintenanceController {
|
||||
@GetLoginDetails() loginDetails: LoginDetails,
|
||||
@Res({ passthrough: true }) res: Response,
|
||||
): Promise<void> {
|
||||
if (dto.action === MaintenanceAction.Start) {
|
||||
const { jwt } = await this.service.startMaintenance(auth.user.name);
|
||||
if (dto.action !== MaintenanceAction.End) {
|
||||
const { jwt } = await this.service.startMaintenance(dto, auth.user.name);
|
||||
return respondWithCookie(res, undefined, {
|
||||
isSecure: loginDetails.isSecure,
|
||||
values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],
|
||||
|
||||
21 server/src/dtos/database-backup.dto.ts (new file)
@@ -0,0 +1,21 @@
import { ApiProperty } from '@nestjs/swagger';
import { IsString } from 'class-validator';

export class DatabaseBackupDto {
  filename!: string;
  filesize!: number;
}

export class DatabaseBackupListResponseDto {
  backups!: DatabaseBackupDto[];
}

export class DatabaseBackupUploadDto {
  @ApiProperty({ type: 'string', format: 'binary', required: false })
  file?: any;
}

export class DatabaseBackupDeleteDto {
  @IsString({ each: true })
  backups!: string[];
}
|
||||
@@ -1,9 +1,14 @@
import { MaintenanceAction } from 'src/enum';
import { ValidateIf } from 'class-validator';
import { MaintenanceAction, StorageFolder } from 'src/enum';
import { ValidateEnum, ValidateString } from 'src/validation';

export class SetMaintenanceModeDto {
  @ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
  action!: MaintenanceAction;

  @ValidateIf((o) => o.action === MaintenanceAction.RestoreDatabase)
  @ValidateString()
  restoreBackupFilename?: string;
}

export class MaintenanceLoginDto {
@@ -14,3 +19,26 @@ export class MaintenanceLoginDto {
export class MaintenanceAuthDto {
  username!: string;
}

export class MaintenanceStatusResponseDto {
  active!: boolean;

  @ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
  action!: MaintenanceAction;

  progress?: number;
  task?: string;
  error?: string;
}

export class MaintenanceDetectInstallStorageFolderDto {
  @ValidateEnum({ enum: StorageFolder, name: 'StorageFolder' })
  folder!: StorageFolder;
  readable!: boolean;
  writable!: boolean;
  files!: number;
}

export class MaintenanceDetectInstallResponseDto {
  storage!: MaintenanceDetectInstallStorageFolderDto[];
}
|
||||
|
||||
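The ValidateIf decorator above means restoreBackupFilename is only validated, as a string, when the requested action is restore_database; for any other action the field can simply be omitted. Two illustrative request bodies for POST /admin/maintenance follow; the filename is a made-up placeholder.
// Sketch only: example payloads matching the DTO above.
const restoreRequest: SetMaintenanceModeDto = {
  action: MaintenanceAction.RestoreDatabase,
  restoreBackupFilename: 'immich-db-backup-example.sql.gz', // hypothetical filename
};

const endRequest: SetMaintenanceModeDto = {
  action: MaintenanceAction.End, // restoreBackupFilename is omitted and not validated here
};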
@@ -136,6 +136,11 @@ export enum Permission {
|
||||
|
||||
ArchiveRead = 'archive.read',
|
||||
|
||||
BackupList = 'backup.list',
|
||||
BackupDownload = 'backup.download',
|
||||
BackupUpload = 'backup.upload',
|
||||
BackupDelete = 'backup.delete',
|
||||
|
||||
DuplicateRead = 'duplicate.read',
|
||||
DuplicateDelete = 'duplicate.delete',
|
||||
|
||||
@@ -697,12 +702,15 @@ export enum DatabaseLock {
|
||||
MediaLocation = 700,
|
||||
GetSystemConfig = 69,
|
||||
BackupDatabase = 42,
|
||||
MaintenanceOperation = 621,
|
||||
MemoryCreation = 777,
|
||||
}
|
||||
|
||||
export enum MaintenanceAction {
|
||||
Start = 'start',
|
||||
End = 'end',
|
||||
SelectDatabaseRestore = 'select_database_restore',
|
||||
RestoreDatabase = 'restore_database',
|
||||
}
|
||||
|
||||
export enum ExitCode {
|
||||
@@ -849,6 +857,7 @@ export enum ApiTag {
|
||||
Authentication = 'Authentication',
|
||||
AuthenticationAdmin = 'Authentication (admin)',
|
||||
Assets = 'Assets',
|
||||
DatabaseBackups = 'Database Backups (admin)',
|
||||
Deprecated = 'Deprecated',
|
||||
Download = 'Download',
|
||||
Duplicates = 'Duplicates',
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { Kysely } from 'kysely';
|
||||
import { Kysely, sql } from 'kysely';
|
||||
import { CommandFactory } from 'nest-commander';
|
||||
import { ChildProcess, fork } from 'node:child_process';
|
||||
import { dirname, join } from 'node:path';
|
||||
import { Worker } from 'node:worker_threads';
|
||||
import { PostgresError } from 'postgres';
|
||||
import { ImmichAdminModule } from 'src/app.module';
|
||||
import { ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
|
||||
import { DatabaseLock, ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { type DB } from 'src/schema';
|
||||
@@ -35,19 +35,18 @@ class Workers {
|
||||
if (isMaintenanceMode) {
|
||||
this.startWorker(ImmichWorker.Maintenance);
|
||||
} else {
|
||||
await this.waitForFreeLock();
|
||||
|
||||
for (const worker of workers) {
|
||||
this.startWorker(worker);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialise a short-lived Nest application to build configuration
|
||||
* @returns System configuration
|
||||
*/
|
||||
private async isMaintenanceMode(): Promise<boolean> {
|
||||
const { database } = new ConfigRepository().getEnv();
|
||||
const kysely = new Kysely<DB>(getKyselyConfig(database.config));
|
||||
const { log: _, ...kyselyConfig } = getKyselyConfig(database.config);
|
||||
const kysely = new Kysely<DB>(kyselyConfig);
|
||||
const systemMetadataRepository = new SystemMetadataRepository(kysely);
|
||||
|
||||
try {
|
||||
@@ -65,6 +64,32 @@ class Workers {
|
||||
}
|
||||
}
|
||||
|
||||
private async waitForFreeLock() {
|
||||
const { database } = new ConfigRepository().getEnv();
|
||||
const kysely = new Kysely<DB>(getKyselyConfig(database.config));
|
||||
|
||||
let locked = false;
|
||||
while (!locked) {
|
||||
locked = await kysely.connection().execute(async (conn) => {
|
||||
const { rows } = await sql<{
|
||||
pg_try_advisory_lock: boolean;
|
||||
}>`SELECT pg_try_advisory_lock(${DatabaseLock.MaintenanceOperation})`.execute(conn);
|
||||
|
||||
const isLocked = rows[0].pg_try_advisory_lock;
|
||||
|
||||
if (isLocked) {
|
||||
await sql`SELECT pg_advisory_unlock(${DatabaseLock.MaintenanceOperation})`.execute(conn);
|
||||
}
|
||||
|
||||
return isLocked;
|
||||
});
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
}
|
||||
|
||||
await kysely.destroy();
|
||||
}
|
||||
|
||||
/**
|
||||
* Start an individual worker
|
||||
* @param name Worker
|
||||
|
||||
67 server/src/maintenance/maintenance-health.repository.ts (new file)
@@ -0,0 +1,67 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { fork } from 'node:child_process';
|
||||
import { dirname, join } from 'node:path';
|
||||
|
||||
@Injectable()
|
||||
export class MaintenanceHealthRepository {
|
||||
checkApiHealth(): Promise<void> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
// eslint-disable-next-line unicorn/prefer-module
|
||||
const basePath = dirname(__filename);
|
||||
const workerFile = join(basePath, '..', 'workers', `api.js`);
|
||||
|
||||
const worker = fork(workerFile, [], {
|
||||
execArgv: process.execArgv.filter((arg) => !arg.startsWith('--inspect')),
|
||||
env: {
|
||||
...process.env,
|
||||
IMMICH_HOST: '127.0.0.1',
|
||||
IMMICH_PORT: '33001',
|
||||
},
|
||||
stdio: ['ignore', 'pipe', 'ignore', 'ipc'],
|
||||
});
|
||||
|
||||
async function checkHealth() {
|
||||
try {
|
||||
const response = await fetch('http://127.0.0.1:33001/api/server/config');
|
||||
const { isOnboarded } = await response.json();
|
||||
if (isOnboarded) {
|
||||
resolve();
|
||||
} else {
|
||||
reject(new Error('Server health check failed, no admin exists.'));
|
||||
}
|
||||
} catch (error) {
|
||||
reject(error);
|
||||
} finally {
|
||||
if (worker.exitCode === null) {
|
||||
worker.kill('SIGTERM');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let output = '',
|
||||
alive = false;
|
||||
|
||||
worker.stdout?.on('data', (data) => {
|
||||
if (alive) {
|
||||
return;
|
||||
}
|
||||
|
||||
output += data;
|
||||
|
||||
if (output.includes('Immich Server is listening')) {
|
||||
alive = true;
|
||||
void checkHealth();
|
||||
}
|
||||
});
|
||||
|
||||
worker.on('exit', reject);
|
||||
worker.on('error', reject);
|
||||
|
||||
setTimeout(() => {
|
||||
if (worker.exitCode === null) {
|
||||
worker.kill('SIGTERM');
|
||||
}
|
||||
}, 20_000);
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -7,17 +7,24 @@ import {
|
||||
WebSocketServer,
|
||||
} from '@nestjs/websockets';
|
||||
import { Server, Socket } from 'socket.io';
|
||||
import { MaintenanceAuthDto, MaintenanceStatusResponseDto } from 'src/dtos/maintenance.dto';
|
||||
import { AppRepository } from 'src/repositories/app.repository';
|
||||
import { AppRestartEvent, ArgsOf } from 'src/repositories/event.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
|
||||
export const serverEvents = ['AppRestart'] as const;
|
||||
export type ServerEvents = (typeof serverEvents)[number];
|
||||
|
||||
export interface ClientEventMap {
|
||||
AppRestartV1: [AppRestartEvent];
|
||||
interface ServerEventMap {
|
||||
AppRestart: [AppRestartEvent];
|
||||
MaintenanceStatus: [MaintenanceStatusResponseDto];
|
||||
}
|
||||
|
||||
interface ClientEventMap {
|
||||
AppRestartV1: [AppRestartEvent];
|
||||
MaintenanceStatusV1: [MaintenanceStatusResponseDto];
|
||||
}
|
||||
|
||||
type AuthFn = (client: Socket) => Promise<MaintenanceAuthDto>;
|
||||
type StatusUpdateFn = (status: MaintenanceStatusResponseDto) => void;
|
||||
|
||||
@WebSocketGateway({
|
||||
cors: true,
|
||||
path: '/api/socket.io',
|
||||
@@ -25,8 +32,11 @@ export interface ClientEventMap {
|
||||
})
|
||||
@Injectable()
|
||||
export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGatewayDisconnect, OnGatewayInit {
|
||||
private authFn?: AuthFn;
|
||||
private statusUpdateFn?: StatusUpdateFn;
|
||||
|
||||
@WebSocketServer()
|
||||
private websocketServer?: Server;
|
||||
private server?: Server;
|
||||
|
||||
constructor(
|
||||
private logger: LoggingRepository,
|
||||
@@ -35,10 +45,10 @@ export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGa
|
||||
this.logger.setContext(MaintenanceWebsocketRepository.name);
|
||||
}
|
||||
|
||||
afterInit(websocketServer: Server) {
|
||||
afterInit(server: Server) {
|
||||
this.logger.log('Initialized websocket server');
|
||||
|
||||
websocketServer.on('AppRestart', (event: ArgsOf<'AppRestart'>, ack?: (ok: 'ok') => void) => {
|
||||
server.on('MaintenanceStatus', (status) => this.statusUpdateFn?.(status));
|
||||
server.on('AppRestart', (event: ArgsOf<'AppRestart'>, ack?: (ok: 'ok') => void) => {
|
||||
this.logger.log(`Restarting due to event... ${JSON.stringify(event)}`);
|
||||
|
||||
ack?.('ok');
|
||||
@@ -46,20 +56,40 @@ export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGa
|
||||
});
|
||||
}
|
||||
|
||||
clientSend<T extends keyof ClientEventMap>(event: T, room: string, ...data: ClientEventMap[T]) {
|
||||
this.server?.to(room).emit(event, ...data);
|
||||
}
|
||||
|
||||
clientBroadcast<T extends keyof ClientEventMap>(event: T, ...data: ClientEventMap[T]) {
|
||||
this.websocketServer?.emit(event, ...data);
|
||||
this.server?.emit(event, ...data);
|
||||
}
|
||||
|
||||
serverSend<T extends ServerEvents>(event: T, ...args: ArgsOf<T>): void {
|
||||
serverSend<T extends keyof ServerEventMap>(event: T, ...args: ServerEventMap[T]): void {
|
||||
this.logger.debug(`Server event: ${event} (send)`);
|
||||
this.websocketServer?.serverSideEmit(event, ...args);
|
||||
this.server?.serverSideEmit(event, ...args);
|
||||
}
|
||||
|
||||
handleConnection(client: Socket) {
|
||||
this.logger.log(`Websocket Connect: ${client.id}`);
|
||||
async handleConnection(client: Socket) {
|
||||
try {
|
||||
await this.authFn!(client);
|
||||
await client.join('private');
|
||||
this.logger.log(`Websocket Connect: ${client.id} (private)`);
|
||||
} catch {
|
||||
await client.join('public');
|
||||
this.logger.log(`Websocket Connect: ${client.id} (public)`);
|
||||
}
|
||||
}
|
||||
|
||||
handleDisconnect(client: Socket) {
|
||||
async handleDisconnect(client: Socket) {
|
||||
this.logger.log(`Websocket Disconnect: ${client.id}`);
|
||||
await Promise.allSettled([client.leave('private'), client.leave('public')]);
|
||||
}
|
||||
|
||||
setAuthFn(fn: (client: Socket) => Promise<MaintenanceAuthDto>) {
|
||||
this.authFn = fn;
|
||||
}
|
||||
|
||||
setStatusUpdateFn(fn: (status: MaintenanceStatusResponseDto) => void) {
|
||||
this.statusUpdateFn = fn;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,23 +1,114 @@
|
||||
import { Body, Controller, Get, Post, Req, Res } from '@nestjs/common';
|
||||
import { Request, Response } from 'express';
|
||||
import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
|
||||
import { ServerConfigDto } from 'src/dtos/server.dto';
|
||||
import { ImmichCookie, MaintenanceAction } from 'src/enum';
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
Delete,
|
||||
Get,
|
||||
Next,
|
||||
Param,
|
||||
Post,
|
||||
Req,
|
||||
Res,
|
||||
UploadedFile,
|
||||
UseInterceptors,
|
||||
} from '@nestjs/common';
|
||||
import { FileInterceptor } from '@nestjs/platform-express';
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import {
|
||||
MaintenanceAuthDto,
|
||||
MaintenanceDetectInstallResponseDto,
|
||||
MaintenanceLoginDto,
|
||||
MaintenanceStatusResponseDto,
|
||||
SetMaintenanceModeDto,
|
||||
} from 'src/dtos/maintenance.dto';
|
||||
import { ServerConfigDto, ServerVersionResponseDto } from 'src/dtos/server.dto';
|
||||
import { ImmichCookie } from 'src/enum';
|
||||
import { MaintenanceRoute } from 'src/maintenance/maintenance-auth.guard';
|
||||
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
|
||||
import { GetLoginDetails } from 'src/middleware/auth.guard';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { LoginDetails } from 'src/services/auth.service';
|
||||
import { sendFile } from 'src/utils/file';
|
||||
import { respondWithCookie } from 'src/utils/response';
|
||||
import { FilenameParamDto } from 'src/validation';
|
||||
|
||||
import type { DatabaseBackupController as _DatabaseBackupController } from 'src/controllers/database-backup.controller';
|
||||
import type { ServerController as _ServerController } from 'src/controllers/server.controller';
|
||||
import { DatabaseBackupDeleteDto, DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
|
||||
|
||||
@Controller()
|
||||
export class MaintenanceWorkerController {
|
||||
constructor(private service: MaintenanceWorkerService) {}
|
||||
constructor(
|
||||
private logger: LoggingRepository,
|
||||
private service: MaintenanceWorkerService,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* {@link _ServerController.getServerConfig }
|
||||
*/
|
||||
@Get('server/config')
|
||||
getServerConfig(): Promise<ServerConfigDto> {
|
||||
getServerConfig(): ServerConfigDto {
|
||||
return this.service.getSystemConfig();
|
||||
}
|
||||
|
||||
@Get('server/version')
|
||||
getServerVersion(): ServerVersionResponseDto {
|
||||
return this.service.getVersion();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.listDatabaseBackups}
|
||||
*/
|
||||
@Get('admin/database-backups')
|
||||
@MaintenanceRoute()
|
||||
listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
|
||||
return this.service.listBackups();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.downloadDatabaseBackup}
|
||||
*/
|
||||
@Get('admin/database-backups/:filename')
|
||||
@MaintenanceRoute()
|
||||
async downloadDatabaseBackup(
|
||||
@Param() { filename }: FilenameParamDto,
|
||||
@Res() res: Response,
|
||||
@Next() next: NextFunction,
|
||||
) {
|
||||
await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.deleteDatabaseBackup}
|
||||
*/
|
||||
@Delete('admin/database-backups')
|
||||
@MaintenanceRoute()
|
||||
async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
|
||||
return this.service.deleteBackup(dto.backups);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.uploadDatabaseBackup}
|
||||
*/
|
||||
@Post('admin/database-backups/upload')
|
||||
@MaintenanceRoute()
|
||||
@UseInterceptors(FileInterceptor('file'))
|
||||
uploadDatabaseBackup(
|
||||
@UploadedFile()
|
||||
file: Express.Multer.File,
|
||||
): Promise<void> {
|
||||
return this.service.uploadBackup(file);
|
||||
}
|
||||
|
||||
@Get('admin/maintenance/status')
|
||||
maintenanceStatus(@Req() request: Request): Promise<MaintenanceStatusResponseDto> {
|
||||
return this.service.status(request.cookies[ImmichCookie.MaintenanceToken]);
|
||||
}
|
||||
|
||||
@Get('admin/maintenance/detect-install')
|
||||
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
|
||||
return this.service.detectPriorInstall();
|
||||
}
|
||||
|
||||
@Post('admin/maintenance/login')
|
||||
async maintenanceLogin(
|
||||
@Req() request: Request,
|
||||
@@ -35,9 +126,7 @@ export class MaintenanceWorkerController {
|
||||
|
||||
@Post('admin/maintenance')
|
||||
@MaintenanceRoute()
|
||||
async setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): Promise<void> {
|
||||
if (dto.action === MaintenanceAction.End) {
|
||||
await this.service.endMaintenance();
|
||||
}
|
||||
setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): void {
|
||||
void this.service.setAction(dto);
|
||||
}
|
||||
}
|
||||
|
||||
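A rough sketch of how the new worker endpoints could be exercised over HTTP. The '/api' prefix is an assumption for illustration; the route paths and the { backups: [...] } body shape come from the controller above, and authentication relies on the maintenance cookie set by the login endpoint.

// List available database backups.
const { backups } = await fetch('/api/admin/database-backups', { credentials: 'include' }).then((r) => r.json());

// Delete selected backups; the body mirrors DatabaseBackupDeleteDto.
await fetch('/api/admin/database-backups', {
  method: 'DELETE',
  credentials: 'include',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ backups: [backups[0]?.filename] }),
});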
@@ -1,25 +1,51 @@
|
||||
import { UnauthorizedException } from '@nestjs/common';
|
||||
import { BadRequestException, UnauthorizedException } from '@nestjs/common';
|
||||
import { SignJWT } from 'jose';
|
||||
import { SystemMetadataKey } from 'src/enum';
|
||||
import { DateTime } from 'luxon';
|
||||
import { PassThrough, Readable } from 'node:stream';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { MaintenanceAction, StorageFolder, SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
|
||||
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
|
||||
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
|
||||
import { automock, getMocks, ServiceMocks } from 'test/utils';
|
||||
import { automock, AutoMocked, getMocks, mockDuplex, mockSpawn, ServiceMocks } from 'test/utils';
|
||||
|
||||
function* mockData() {
|
||||
yield '';
|
||||
}
|
||||
|
||||
describe(MaintenanceWorkerService.name, () => {
|
||||
let sut: MaintenanceWorkerService;
|
||||
let mocks: ServiceMocks;
|
||||
let maintenanceWorkerRepositoryMock: MaintenanceWebsocketRepository;
|
||||
let maintenanceWebsocketRepositoryMock: AutoMocked<MaintenanceWebsocketRepository>;
|
||||
let maintenanceHealthRepositoryMock: AutoMocked<MaintenanceHealthRepository>;
|
||||
|
||||
beforeEach(() => {
|
||||
mocks = getMocks();
|
||||
maintenanceWorkerRepositoryMock = automock(MaintenanceWebsocketRepository, { args: [mocks.logger], strict: false });
|
||||
maintenanceWebsocketRepositoryMock = automock(MaintenanceWebsocketRepository, {
|
||||
args: [mocks.logger],
|
||||
strict: false,
|
||||
});
|
||||
maintenanceHealthRepositoryMock = automock(MaintenanceHealthRepository, {
|
||||
args: [mocks.logger],
|
||||
strict: false,
|
||||
});
|
||||
|
||||
sut = new MaintenanceWorkerService(
|
||||
mocks.logger as never,
|
||||
mocks.app,
|
||||
mocks.config,
|
||||
mocks.systemMetadata as never,
|
||||
maintenanceWorkerRepositoryMock,
|
||||
maintenanceWebsocketRepositoryMock,
|
||||
maintenanceHealthRepositoryMock,
|
||||
mocks.storage as never,
|
||||
mocks.process,
|
||||
mocks.database as never,
|
||||
);
|
||||
|
||||
sut.mock({
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
});
|
||||
});
|
||||
|
||||
it('should work', () => {
|
||||
@@ -27,14 +53,43 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
|
||||
describe('getSystemConfig', () => {
|
||||
it('should respond the server is in maintenance mode', async () => {
|
||||
await expect(sut.getSystemConfig()).resolves.toMatchObject(
|
||||
it('should respond the server is in maintenance mode', () => {
|
||||
expect(sut.getSystemConfig()).toMatchObject(
|
||||
expect.objectContaining({
|
||||
maintenanceMode: true,
|
||||
}),
|
||||
);
|
||||
|
||||
expect(mocks.systemMetadata.get).toHaveBeenCalled();
|
||||
expect(mocks.systemMetadata.get).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe.skip('ssr');
|
||||
describe.skip('detectMediaLocation');
|
||||
|
||||
describe('setStatus', () => {
|
||||
it('should broadcast status', () => {
|
||||
sut.setStatus({
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
task: 'abc',
|
||||
error: 'def',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalled();
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledTimes(2);
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: 'start',
|
||||
task: 'abc',
|
||||
error: 'def',
|
||||
});
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'public', {
|
||||
active: true,
|
||||
action: 'start',
|
||||
task: 'abc',
|
||||
error: 'Something went wrong, see logs!',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -42,7 +97,14 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;
|
||||
|
||||
it('should log a valid login URL', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
await expect(sut.logSecret()).resolves.toBeUndefined();
|
||||
expect(mocks.logger.log).toHaveBeenCalledWith(expect.stringMatching(RE_LOGIN_URL));
|
||||
|
||||
@@ -63,7 +125,13 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
|
||||
it('should parse cookie properly', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
await expect(
|
||||
sut.authenticate({
|
||||
@@ -73,13 +141,102 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('status', () => {
|
||||
beforeEach(() => {
|
||||
sut.mock({
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
error: 'secret value!',
|
||||
});
|
||||
});
|
||||
|
||||
it('generates private status', async () => {
|
||||
const jwt = await new SignJWT({ _mockValue: true })
|
||||
.setProtectedHeader({ alg: 'HS256' })
|
||||
.setIssuedAt()
|
||||
.setExpirationTime('4h')
|
||||
.sign(new TextEncoder().encode('secret'));
|
||||
|
||||
await expect(sut.status(jwt)).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
error: 'secret value!',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('generates public status', async () => {
|
||||
await expect(sut.status()).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
error: 'Something went wrong, see logs!',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('detectPriorInstall', () => {
|
||||
it('should generate a report about a prior installation', async () => {
|
||||
mocks.storage.readdir.mockResolvedValue(['.immich', 'file1', 'file2']);
|
||||
mocks.storage.readFile.mockResolvedValue(undefined as never);
|
||||
mocks.storage.overwriteFile.mockRejectedValue(undefined as never);
|
||||
|
||||
await expect(sut.detectPriorInstall()).resolves.toMatchInlineSnapshot(`
|
||||
{
|
||||
"storage": [
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "encoded-video",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "library",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "upload",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "profile",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "thumbs",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "backups",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('login', () => {
|
||||
it('should fail without token', async () => {
|
||||
await expect(sut.login()).rejects.toThrowError(new UnauthorizedException('Missing JWT Token'));
|
||||
});
|
||||
|
||||
it('should fail with expired JWT', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
const jwt = await new SignJWT({})
|
||||
.setProtectedHeader({ alg: 'HS256' })
|
||||
@@ -91,7 +248,13 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
|
||||
it('should succeed with valid JWT', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
const jwt = await new SignJWT({ _mockValue: true })
|
||||
.setProtectedHeader({ alg: 'HS256' })
|
||||
@@ -107,22 +270,275 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('endMaintenance', () => {
|
||||
describe.skip('setAction'); // just calls setStatus+runAction
|
||||
|
||||
/**
|
||||
* Actions
|
||||
*/
|
||||
|
||||
describe('action: start', () => {
|
||||
it('should not do anything', async () => {
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.Start,
|
||||
});
|
||||
|
||||
expect(mocks.logger.log).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('action: end', () => {
|
||||
it('should set maintenance mode', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: false });
|
||||
await expect(sut.endMaintenance()).resolves.toBeUndefined();
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.End,
|
||||
});
|
||||
|
||||
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: false,
|
||||
});
|
||||
|
||||
expect(maintenanceWorkerRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
|
||||
expect(maintenanceWebsocketRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
|
||||
isMaintenanceMode: false,
|
||||
});
|
||||
|
||||
expect(maintenanceWorkerRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
|
||||
expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
|
||||
isMaintenanceMode: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('action: restore database', () => {
|
||||
beforeEach(() => {
|
||||
mocks.database.tryLock.mockResolvedValueOnce(true);
|
||||
|
||||
mocks.storage.readdir.mockResolvedValue([]);
|
||||
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
|
||||
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
|
||||
mocks.process.fork.mockImplementation(() => mockSpawn(0, 'Immich Server is listening', ''));
|
||||
mocks.storage.rename.mockResolvedValue();
|
||||
mocks.storage.unlink.mockResolvedValue();
|
||||
mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
|
||||
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
|
||||
mocks.storage.createGzip.mockReturnValue(new PassThrough());
|
||||
mocks.storage.createGunzip.mockReturnValue(new PassThrough());
|
||||
});
|
||||
|
||||
it('should update maintenance mode state', async () => {
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'filename',
|
||||
});
|
||||
|
||||
expect(mocks.database.tryLock).toHaveBeenCalled();
|
||||
expect(mocks.logger.log).toHaveBeenCalledWith('Running maintenance action restore_database');
|
||||
|
||||
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: 'start',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fail to restore invalid backup', async () => {
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'filename',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
error: 'Error: Invalid backup file format!',
|
||||
task: 'error',
|
||||
});
|
||||
});
|
||||
|
||||
it('should successfully run a backup', async () => {
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'development-filename.sql',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith(
|
||||
'MaintenanceStatusV1',
|
||||
expect.any(String),
|
||||
{
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
task: 'ready',
|
||||
progress: expect.any(Number),
|
||||
},
|
||||
);
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
|
||||
'MaintenanceStatusV1',
|
||||
expect.any(String),
|
||||
{
|
||||
active: true,
|
||||
action: 'end',
|
||||
},
|
||||
);
|
||||
|
||||
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
|
||||
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('should fail if backup creation fails', async () => {
|
||||
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
|
||||
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'development-filename.sql',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
error: 'Error: pg_dump non-zero exit code (1)\nerror',
|
||||
task: 'error',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
|
||||
'MaintenanceStatusV1',
|
||||
expect.any(String),
|
||||
expect.objectContaining({
|
||||
task: 'error',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should fail if restore itself fails', async () => {
|
||||
mocks.process.spawnDuplexStream
|
||||
.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''))
|
||||
.mockReturnValueOnce(mockDuplex('gzip', 0, 'data', ''))
|
||||
.mockReturnValueOnce(mockDuplex('psql', 1, '', 'error'));
|
||||
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'development-filename.sql',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
error: 'Error: psql non-zero exit code (1)\nerror',
|
||||
task: 'error',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
|
||||
'MaintenanceStatusV1',
|
||||
expect.any(String),
|
||||
expect.objectContaining({
|
||||
task: 'error',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should rollback if database migrations fail', async () => {
|
||||
mocks.database.runMigrations.mockRejectedValue(new Error('Migrations Error'));
|
||||
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'development-filename.sql',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
error: 'Error: Migrations Error',
|
||||
task: 'error',
|
||||
});
|
||||
|
||||
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalledTimes(0);
|
||||
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
|
||||
});
|
||||
|
||||
it('should rollback if API healthcheck fails', async () => {
|
||||
maintenanceHealthRepositoryMock.checkApiHealth.mockRejectedValue(new Error('Health Error'));
|
||||
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'development-filename.sql',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
error: 'Error: Health Error',
|
||||
task: 'error',
|
||||
});
|
||||
|
||||
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
|
||||
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* Backups
|
||||
*/
|
||||
|
||||
describe('listBackups', () => {
|
||||
it('should give us all backups', async () => {
|
||||
mocks.storage.readdir.mockResolvedValue([
|
||||
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
|
||||
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
|
||||
'immich-db-backup-1753789649000.sql.gz',
|
||||
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
|
||||
]);
|
||||
mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
|
||||
|
||||
await expect(sut.listBackups()).resolves.toMatchObject({
|
||||
backups: [
|
||||
{ filename: 'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
|
||||
{ filename: 'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
|
||||
{ filename: 'immich-db-backup-1753789649000.sql.gz', filesize: 1024 },
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteBackup', () => {
|
||||
it('should reject invalid file names', async () => {
|
||||
await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
|
||||
new BadRequestException('Invalid backup name!'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should unlink the target file', async () => {
|
||||
await sut.deleteBackup(['filename.sql']);
|
||||
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.storage.unlink).toHaveBeenCalledWith(
|
||||
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('uploadBackup', () => {
|
||||
it('should reject invalid file names', async () => {
|
||||
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
|
||||
new BadRequestException('Invalid backup name!'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should write file', async () => {
|
||||
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
|
||||
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
|
||||
});
|
||||
});
|
||||
|
||||
describe('downloadBackup', () => {
|
||||
it('should reject invalid file names', () => {
|
||||
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
|
||||
});
|
||||
|
||||
it('should get backup path', () => {
|
||||
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
|
||||
expect.objectContaining({
|
||||
path: '/data/backups/hello.sql.gz',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
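mockSpawn and mockDuplex come from test/utils and are not part of this diff, so the following is only a guess at what a mockDuplex helper might look like: a Duplex that either emits the given stdout and ends, or destroys itself with the same 'non-zero exit code' message format the tests assert against.

import { Duplex } from 'node:stream';

// Hypothetical stand-in for test/utils#mockDuplex; illustration only, not the project's helper.
function mockDuplex(command: string, exitCode: number, stdout: string, stderr: string): Duplex {
  const duplex = new Duplex({
    write(_chunk, _encoding, callback) {
      callback(); // swallow stdin, like a process whose input is irrelevant to the test
    },
    read() {
      // data is pushed below
    },
  });

  if (exitCode === 0) {
    duplex.push(stdout);
    duplex.push(null);
  } else {
    // mirrors the error produced by ProcessRepository.spawnDuplexStream
    process.nextTick(() => duplex.destroy(new Error(`${command} non-zero exit code (${exitCode})\n${stderr}`)));
  }

  return duplex;
}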
@@ -4,19 +4,41 @@ import { NextFunction, Request, Response } from 'express';
|
||||
import { jwtVerify } from 'jose';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { IncomingHttpHeaders } from 'node:http';
|
||||
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
|
||||
import { ImmichCookie, SystemMetadataKey } from 'src/enum';
|
||||
import { serverVersion } from 'src/constants';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import {
|
||||
MaintenanceAuthDto,
|
||||
MaintenanceDetectInstallResponseDto,
|
||||
MaintenanceStatusResponseDto,
|
||||
SetMaintenanceModeDto,
|
||||
} from 'src/dtos/maintenance.dto';
|
||||
import { ServerConfigDto, ServerVersionResponseDto } from 'src/dtos/server.dto';
|
||||
import { DatabaseLock, ImmichCookie, MaintenanceAction, SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
|
||||
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
|
||||
import { AppRepository } from 'src/repositories/app.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { type ApiService as _ApiService } from 'src/services/api.service';
|
||||
import { type BaseService as _BaseService } from 'src/services/base.service';
|
||||
import { type DatabaseBackupService as _DatabaseBackupService } from 'src/services/database-backup.service';
|
||||
import { type ServerService as _ServerService } from 'src/services/server.service';
|
||||
import { type VersionService as _VersionService } from 'src/services/version.service';
|
||||
import { MaintenanceModeState } from 'src/types';
|
||||
import { getConfig } from 'src/utils/config';
|
||||
import { createMaintenanceLoginUrl } from 'src/utils/maintenance';
|
||||
import {
|
||||
deleteDatabaseBackup,
|
||||
downloadDatabaseBackup,
|
||||
listDatabaseBackups,
|
||||
restoreDatabaseBackup,
|
||||
uploadDatabaseBackup,
|
||||
} from 'src/utils/database-backups';
|
||||
import { ImmichFileResponse } from 'src/utils/file';
|
||||
import { createMaintenanceLoginUrl, detectPriorInstall } from 'src/utils/maintenance';
|
||||
import { getExternalDomain } from 'src/utils/misc';
|
||||
|
||||
/**
|
||||
@@ -24,16 +46,51 @@ import { getExternalDomain } from 'src/utils/misc';
|
||||
*/
|
||||
@Injectable()
|
||||
export class MaintenanceWorkerService {
|
||||
#secret: string | null = null;
|
||||
#status: MaintenanceStatusResponseDto = {
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
};
|
||||
|
||||
constructor(
|
||||
protected logger: LoggingRepository,
|
||||
private appRepository: AppRepository,
|
||||
private configRepository: ConfigRepository,
|
||||
private systemMetadataRepository: SystemMetadataRepository,
|
||||
private maintenanceWorkerRepository: MaintenanceWebsocketRepository,
|
||||
private maintenanceWebsocketRepository: MaintenanceWebsocketRepository,
|
||||
private maintenanceHealthRepository: MaintenanceHealthRepository,
|
||||
private storageRepository: StorageRepository,
|
||||
private processRepository: ProcessRepository,
|
||||
private databaseRepository: DatabaseRepository,
|
||||
) {
|
||||
this.logger.setContext(this.constructor.name);
|
||||
}
|
||||
|
||||
mock(status: MaintenanceStatusResponseDto) {
|
||||
this.#secret = 'secret';
|
||||
this.#status = status;
|
||||
}
|
||||
|
||||
async init() {
|
||||
const state = (await this.systemMetadataRepository.get(
|
||||
SystemMetadataKey.MaintenanceMode,
|
||||
)) as MaintenanceModeState & { isMaintenanceMode: true };
|
||||
|
||||
this.#secret = state.secret;
|
||||
this.#status = {
|
||||
active: true,
|
||||
action: state.action.action,
|
||||
};
|
||||
|
||||
StorageCore.setMediaLocation(this.detectMediaLocation());
|
||||
|
||||
this.maintenanceWebsocketRepository.setAuthFn(async (client) => this.authenticate(client.request.headers));
|
||||
this.maintenanceWebsocketRepository.setStatusUpdateFn((status) => (this.#status = status));
|
||||
|
||||
await this.logSecret();
|
||||
void this.runAction(state.action);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _BaseService.configRepos}
|
||||
*/
|
||||
@@ -55,22 +112,17 @@ export class MaintenanceWorkerService {
|
||||
/**
|
||||
* {@link _ServerService.getSystemConfig}
|
||||
*/
|
||||
async getSystemConfig() {
|
||||
const config = await this.getConfig({ withCache: false });
|
||||
|
||||
getSystemConfig() {
|
||||
return {
|
||||
loginPageMessage: config.server.loginPageMessage,
|
||||
trashDays: config.trash.days,
|
||||
userDeleteDelay: config.user.deleteDelay,
|
||||
oauthButtonText: config.oauth.buttonText,
|
||||
isInitialized: true,
|
||||
isOnboarded: true,
|
||||
externalDomain: config.server.externalDomain,
|
||||
publicUsers: config.server.publicUsers,
|
||||
mapDarkStyleUrl: config.map.darkStyle,
|
||||
mapLightStyleUrl: config.map.lightStyle,
|
||||
maintenanceMode: true,
|
||||
};
|
||||
} as ServerConfigDto;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _VersionService.getVersion}
|
||||
*/
|
||||
getVersion() {
|
||||
return ServerVersionResponseDto.fromSemVer(serverVersion);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -106,12 +158,99 @@ export class MaintenanceWorkerService {
|
||||
};
|
||||
}
|
||||
|
||||
private async secret(): Promise<string> {
|
||||
const state = (await this.systemMetadataRepository.get(SystemMetadataKey.MaintenanceMode)) as {
|
||||
secret: string;
|
||||
};
|
||||
/**
|
||||
* {@link _StorageService.detectMediaLocation}
|
||||
*/
|
||||
detectMediaLocation(): string {
|
||||
const envData = this.configRepository.getEnv();
|
||||
if (envData.storage.mediaLocation) {
|
||||
return envData.storage.mediaLocation;
|
||||
}
|
||||
|
||||
return state.secret;
|
||||
const targets: string[] = [];
|
||||
const candidates = ['/data', '/usr/src/app/upload'];
|
||||
|
||||
for (const candidate of candidates) {
|
||||
const exists = this.storageRepository.existsSync(candidate);
|
||||
if (exists) {
|
||||
targets.push(candidate);
|
||||
}
|
||||
}
|
||||
|
||||
if (targets.length === 1) {
|
||||
return targets[0];
|
||||
}
|
||||
|
||||
return '/usr/src/app/upload';
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupService.listBackups}
|
||||
*/
|
||||
async listBackups(): Promise<{ backups: { filename: string; filesize: number }[] }> {
|
||||
const backups = await listDatabaseBackups(this.backupRepos);
|
||||
return { backups };
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupService.deleteBackup}
|
||||
*/
|
||||
async deleteBackup(files: string[]): Promise<void> {
|
||||
return deleteDatabaseBackup(this.backupRepos, files);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupService.uploadBackup}
|
||||
*/
|
||||
async uploadBackup(file: Express.Multer.File): Promise<void> {
|
||||
return uploadDatabaseBackup(this.backupRepos, file);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupService.downloadBackup}
|
||||
*/
|
||||
downloadBackup(fileName: string): ImmichFileResponse {
|
||||
return downloadDatabaseBackup(fileName);
|
||||
}
|
||||
|
||||
private get secret() {
|
||||
if (!this.#secret) {
|
||||
throw new Error('Secret is not initialised yet.');
|
||||
}
|
||||
|
||||
return this.#secret;
|
||||
}
|
||||
|
||||
private get backupRepos() {
|
||||
return {
|
||||
logger: this.logger,
|
||||
storage: this.storageRepository,
|
||||
config: this.configRepository,
|
||||
process: this.processRepository,
|
||||
database: this.databaseRepository,
|
||||
health: this.maintenanceHealthRepository,
|
||||
};
|
||||
}
|
||||
|
||||
private getStatus(): MaintenanceStatusResponseDto {
|
||||
return this.#status;
|
||||
}
|
||||
|
||||
private getPublicStatus(): MaintenanceStatusResponseDto {
|
||||
const state = structuredClone(this.#status);
|
||||
|
||||
if (state.error) {
|
||||
state.error = 'Something went wrong, see logs!';
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
||||
|
||||
setStatus(status: MaintenanceStatusResponseDto): void {
|
||||
this.#status = status;
|
||||
this.maintenanceWebsocketRepository.serverSend('MaintenanceStatus', status);
|
||||
this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'private', status);
|
||||
this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'public', this.getPublicStatus());
|
||||
}
|
||||
|
||||
async logSecret(): Promise<void> {
|
||||
@@ -123,7 +262,7 @@ export class MaintenanceWorkerService {
|
||||
{
|
||||
username: 'immich-admin',
|
||||
},
|
||||
await this.secret(),
|
||||
this.secret,
|
||||
);
|
||||
|
||||
this.logger.log(`\n\n🚧 Immich is in maintenance mode, you can log in using the following URL:\n${url}\n`);
|
||||
@@ -134,28 +273,115 @@ export class MaintenanceWorkerService {
|
||||
return this.login(jwtToken);
|
||||
}
|
||||
|
||||
async status(potentiallyJwt?: string): Promise<MaintenanceStatusResponseDto> {
|
||||
try {
|
||||
await this.login(potentiallyJwt);
|
||||
return this.getStatus();
|
||||
} catch {
|
||||
return this.getPublicStatus();
|
||||
}
|
||||
}
|
||||
|
||||
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
|
||||
return detectPriorInstall(this.storageRepository);
|
||||
}
|
||||
|
||||
async login(jwt?: string): Promise<MaintenanceAuthDto> {
|
||||
if (!jwt) {
|
||||
throw new UnauthorizedException('Missing JWT Token');
|
||||
}
|
||||
|
||||
const secret = await this.secret();
|
||||
|
||||
try {
|
||||
const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(secret));
|
||||
const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(this.secret));
|
||||
return result.payload;
|
||||
} catch {
|
||||
throw new UnauthorizedException('Invalid JWT Token');
|
||||
}
|
||||
}
|
||||
|
||||
async endMaintenance(): Promise<void> {
|
||||
async setAction(action: SetMaintenanceModeDto) {
|
||||
this.setStatus({
|
||||
active: true,
|
||||
action: action.action,
|
||||
});
|
||||
|
||||
await this.runAction(action);
|
||||
}
|
||||
|
||||
async runAction(action: SetMaintenanceModeDto) {
|
||||
switch (action.action) {
|
||||
case MaintenanceAction.Start: {
|
||||
return;
|
||||
}
|
||||
case MaintenanceAction.End: {
|
||||
return this.endMaintenance();
|
||||
}
|
||||
case MaintenanceAction.SelectDatabaseRestore: {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const lock = await this.databaseRepository.tryLock(DatabaseLock.MaintenanceOperation);
|
||||
if (!lock) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.log(`Running maintenance action ${action.action}`);
|
||||
|
||||
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: true,
|
||||
secret: this.secret,
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
if (!action.restoreBackupFilename) {
|
||||
throw new Error("Expected restoreBackupFilename but it's missing!");
|
||||
}
|
||||
|
||||
await this.restoreBackup(action.restoreBackupFilename);
|
||||
} catch (error) {
|
||||
this.logger.error(`Encountered error running action: ${error}`);
|
||||
this.setStatus({
|
||||
active: true,
|
||||
action: action.action,
|
||||
task: 'error',
|
||||
error: '' + error,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private async restoreBackup(filename: string): Promise<void> {
|
||||
this.setStatus({
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
task: 'ready',
|
||||
progress: 0,
|
||||
});
|
||||
|
||||
await restoreDatabaseBackup(this.backupRepos, filename, (task, progress) =>
|
||||
this.setStatus({
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
progress,
|
||||
task,
|
||||
}),
|
||||
);
|
||||
|
||||
await this.setAction({
|
||||
action: MaintenanceAction.End,
|
||||
});
|
||||
}
|
||||
|
||||
private async endMaintenance(): Promise<void> {
|
||||
const state: MaintenanceModeState = { isMaintenanceMode: false as const };
|
||||
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, state);
|
||||
|
||||
// => corresponds to notification.service.ts#onAppRestart
|
||||
this.maintenanceWorkerRepository.clientBroadcast('AppRestartV1', state);
|
||||
this.maintenanceWorkerRepository.serverSend('AppRestart', state);
|
||||
this.maintenanceWebsocketRepository.clientBroadcast('AppRestartV1', state);
|
||||
this.maintenanceWebsocketRepository.serverSend('AppRestart', state);
|
||||
this.appRepository.exitApp();
|
||||
}
|
||||
}
|
||||
|
||||
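The login flow above hinges on the HS256 secret stored under SystemMetadataKey.MaintenanceMode: logSecret signs it into the login URL and login verifies it with jose. A minimal round-trip under those assumptions, using the same username the URL builder passes:

import { jwtVerify, SignJWT } from 'jose';

const key = new TextEncoder().encode('secret'); // stand-in for the per-install secret

// Issue a short-lived maintenance token, as the tests above do.
const token = await new SignJWT({ username: 'immich-admin' })
  .setProtectedHeader({ alg: 'HS256' })
  .setIssuedAt()
  .setExpirationTime('4h')
  .sign(key);

// Verify it again, as MaintenanceWorkerService.login does; an expired or tampered token throws.
const { payload } = await jwtVerify<{ username: string }>(token, key);
console.log(payload.username); // 'immich-admin'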
85
server/src/repositories/process.repository.spec.ts
Normal file
85
server/src/repositories/process.repository.spec.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { ChildProcessWithoutNullStreams } from 'node:child_process';
|
||||
import { Readable, Writable } from 'node:stream';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
|
||||
function* data() {
|
||||
yield 'Hello, world!';
|
||||
}
|
||||
|
||||
describe(ProcessRepository.name, () => {
|
||||
let sut: ProcessRepository;
|
||||
let sink: Writable;
|
||||
|
||||
beforeAll(() => {
|
||||
sut = new ProcessRepository();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
sink = new Writable({
|
||||
write(_chunk, _encoding, callback) {
|
||||
callback();
|
||||
},
|
||||
|
||||
final(callback) {
|
||||
callback();
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
describe('createSpawnDuplexStream', () => {
|
||||
it('should work (drain to stdout)', async () => {
|
||||
const process = sut.spawnDuplexStream('bash', ['-c', 'exit 0']);
|
||||
await pipeline(process, sink);
|
||||
});
|
||||
|
||||
it('should throw on non-zero exit code', async () => {
|
||||
const process = sut.spawnDuplexStream('bash', ['-c', 'echo "error message" >&2; exit 1']);
|
||||
await expect(pipeline(process, sink)).rejects.toThrowErrorMatchingInlineSnapshot(`
|
||||
[Error: bash non-zero exit code (1)
|
||||
error message
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('should accept stdin / output stdout', async () => {
|
||||
let output = '';
|
||||
const sink = new Writable({
|
||||
write(chunk, _encoding, callback) {
|
||||
output += chunk;
|
||||
callback();
|
||||
},
|
||||
|
||||
final(callback) {
|
||||
callback();
|
||||
},
|
||||
});
|
||||
|
||||
const echoProcess = sut.spawnDuplexStream('cat');
|
||||
await pipeline(Readable.from(data()), echoProcess, sink);
|
||||
expect(output).toBe('Hello, world!');
|
||||
});
|
||||
|
||||
it('should drain stdin on process exit', async () => {
|
||||
let resolve1: () => void;
|
||||
let resolve2: () => void;
|
||||
const promise1 = new Promise<void>((r) => (resolve1 = r));
|
||||
const promise2 = new Promise<void>((r) => (resolve2 = r));
|
||||
|
||||
async function* data() {
|
||||
yield 'Hello, world!';
|
||||
await promise1;
|
||||
await promise2;
|
||||
yield 'Write after stdin close / process exit!';
|
||||
}
|
||||
|
||||
const process = sut.spawnDuplexStream('bash', ['-c', 'exit 0']);
|
||||
|
||||
const realProcess = (process as never as { _process: ChildProcessWithoutNullStreams })._process;
|
||||
realProcess.on('close', () => setImmediate(() => resolve1()));
|
||||
realProcess.stdin.on('close', () => setImmediate(() => resolve2()));
|
||||
|
||||
await pipeline(Readable.from(data()), process);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,9 +1,110 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { ChildProcessWithoutNullStreams, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
|
||||
import { ChildProcessWithoutNullStreams, fork, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
|
||||
import { Duplex } from 'node:stream';
|
||||
|
||||
@Injectable()
|
||||
export class ProcessRepository {
|
||||
spawn(command: string, args: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
|
||||
spawn(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
|
||||
return spawn(command, args, options);
|
||||
}
|
||||
|
||||
spawnDuplexStream(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): Duplex {
|
||||
let stdinClosed = false;
|
||||
let drainCallback: undefined | (() => void);
|
||||
|
||||
const process = this.spawn(command, args, options);
|
||||
const duplex = new Duplex({
|
||||
// duplex -> stdin
|
||||
write(chunk, encoding, callback) {
|
||||
// drain the input if process dies
|
||||
if (stdinClosed) {
|
||||
return callback();
|
||||
}
|
||||
|
||||
// handle stream backpressure
|
||||
if (process.stdin.write(chunk, encoding)) {
|
||||
callback();
|
||||
} else {
|
||||
drainCallback = callback;
|
||||
process.stdin.once('drain', () => {
|
||||
drainCallback = undefined;
|
||||
callback();
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
read() {
|
||||
// no-op
|
||||
},
|
||||
|
||||
final(callback) {
|
||||
if (stdinClosed) {
|
||||
callback();
|
||||
} else {
|
||||
process.stdin.end(callback);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
// stdout -> duplex
|
||||
process.stdout.on('data', (chunk) => {
|
||||
// handle stream backpressure
|
||||
if (!duplex.push(chunk)) {
|
||||
process.stdout.pause();
|
||||
}
|
||||
});
|
||||
|
||||
duplex.on('resume', () => process.stdout.resume());
|
||||
|
||||
// end handling
|
||||
let stdoutClosed = false;
|
||||
function close(error?: Error) {
|
||||
stdinClosed = true;
|
||||
|
||||
if (error) {
|
||||
duplex.destroy(error);
|
||||
} else if (stdoutClosed && typeof process.exitCode === 'number') {
|
||||
duplex.push(null);
|
||||
}
|
||||
}
|
||||
|
||||
process.stdout.on('close', () => {
|
||||
stdoutClosed = true;
|
||||
close();
|
||||
});
|
||||
|
||||
// error handling
|
||||
process.on('error', close);
|
||||
process.stdout.on('error', close);
|
||||
process.stdin.on('error', (error) => {
|
||||
if ((error as { code?: 'EPIPE' })?.code === 'EPIPE') {
|
||||
try {
|
||||
drainCallback!();
|
||||
} catch (error) {
|
||||
close(error as Error);
|
||||
}
|
||||
} else {
|
||||
close(error);
|
||||
}
|
||||
});
|
||||
|
||||
let stderr = '';
|
||||
process.stderr.on('data', (chunk) => (stderr += chunk));
|
||||
|
||||
process.on('exit', (code) => {
|
||||
console.info(`${command} exited (${code})`);
|
||||
|
||||
if (code === 0) {
|
||||
close();
|
||||
} else {
|
||||
close(new Error(`${command} non-zero exit code (${code})\n${stderr}`));
|
||||
}
|
||||
});
|
||||
|
||||
return Object.assign(duplex, { _process: process });
|
||||
}
|
||||
|
||||
fork(...args: Parameters<typeof fork>): ReturnType<typeof fork> {
|
||||
return fork(...args);
|
||||
}
|
||||
}
|
||||
|
||||
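Because spawnDuplexStream exposes the child process as a single Duplex, callers can compose it with stream pipelines and get error propagation, including the non-zero-exit error built above, for free. A small sketch; the file paths are invented for illustration:

import { createReadStream, createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { ProcessRepository } from 'src/repositories/process.repository';

const processRepository = new ProcessRepository();

// Compress a plain SQL dump; pipeline rejects if gzip exits non-zero or any stream errors.
await pipeline(
  createReadStream('/data/backups/dump.sql'),
  processRepository.spawnDuplexStream('gzip', ['--rsyncable']),
  createWriteStream('/data/backups/dump.sql.gz'),
);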
@@ -5,7 +5,8 @@ import { escapePath, glob, globStream } from 'fast-glob';
|
||||
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync, ReadOptionsWithBuffer } from 'node:fs';
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
import { Readable, Writable } from 'node:stream';
|
||||
import { PassThrough, Readable, Writable } from 'node:stream';
|
||||
import { createGunzip, createGzip } from 'node:zlib';
|
||||
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { mimeTypes } from 'src/utils/mime-types';
|
||||
@@ -93,6 +94,18 @@ export class StorageRepository {
|
||||
return { stream: archive, addFile, finalize };
|
||||
}
|
||||
|
||||
createGzip(): PassThrough {
|
||||
return createGzip();
|
||||
}
|
||||
|
||||
createGunzip(): PassThrough {
|
||||
return createGunzip();
|
||||
}
|
||||
|
||||
createPlainReadStream(filepath: string): Readable {
|
||||
return createReadStream(filepath);
|
||||
}
|
||||
|
||||
async createReadStream(filepath: string, mimeType?: string | null): Promise<ImmichReadStream> {
|
||||
const { size } = await fs.stat(filepath);
|
||||
await fs.access(filepath, constants.R_OK);
|
||||
|
||||
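Exposing createGzip, createGunzip and createPlainReadStream on the repository keeps zlib and fs details out of the services, which is what lets the specs above swap them for PassThrough streams. A sketch of the intended call shape, assuming an injected StorageRepository instance and illustrative paths:

import { pipeline } from 'node:stream/promises';
import { StorageRepository } from 'src/repositories/storage.repository';

declare const storageRepository: StorageRepository; // injected elsewhere

// Decompress a backup entirely through repository wrappers; each factory can be mocked in tests.
await pipeline(
  storageRepository.createPlainReadStream('/data/backups/backup.sql.gz'),
  storageRepository.createGunzip(),
  storageRepository.createWriteStream('/data/backups/backup.sql'),
);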
@@ -5,7 +5,7 @@ import { StorageCore } from 'src/cores/storage.core';
|
||||
import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
|
||||
import { BackupService } from 'src/services/backup.service';
|
||||
import { systemConfigStub } from 'test/fixtures/system-config.stub';
|
||||
import { mockSpawn, newTestService, ServiceMocks } from 'test/utils';
|
||||
import { mockDuplex, mockSpawn, newTestService, ServiceMocks } from 'test/utils';
|
||||
import { describe } from 'vitest';
|
||||
|
||||
describe(BackupService.name, () => {
|
||||
@@ -147,6 +147,7 @@ describe(BackupService.name, () => {
|
||||
beforeEach(() => {
|
||||
mocks.storage.readdir.mockResolvedValue([]);
|
||||
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
|
||||
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
|
||||
mocks.storage.rename.mockResolvedValue();
|
||||
mocks.storage.unlink.mockResolvedValue();
|
||||
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
|
||||
@@ -165,7 +166,7 @@ describe(BackupService.name, () => {
|
||||
({ sut, mocks } = newTestService(BackupService, { config: configMock }));
|
||||
|
||||
mocks.storage.readdir.mockResolvedValue([]);
|
||||
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
|
||||
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
|
||||
mocks.storage.rename.mockResolvedValue();
|
||||
mocks.storage.unlink.mockResolvedValue();
|
||||
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
|
||||
@@ -174,14 +175,16 @@ describe(BackupService.name, () => {
|
||||
|
||||
await sut.handleBackupDatabase();
|
||||
|
||||
expect(mocks.process.spawn).toHaveBeenCalled();
|
||||
const call = mocks.process.spawn.mock.calls[0];
|
||||
expect(mocks.process.spawnDuplexStream).toHaveBeenCalled();
|
||||
const call = mocks.process.spawnDuplexStream.mock.calls[0];
|
||||
const args = call[1] as string[];
|
||||
// ['--dbname', '<url>', '--clean', '--if-exists']
|
||||
expect(args[0]).toBe('--dbname');
|
||||
const passedUrl = args[1];
|
||||
expect(passedUrl).not.toContain('uselibpqcompat');
|
||||
expect(passedUrl).toContain('sslmode=require');
|
||||
expect(args).toMatchInlineSnapshot(`
|
||||
[
|
||||
"postgresql://postgres:pwd@host:5432/immich?sslmode=require",
|
||||
"--clean",
|
||||
"--if-exists",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('should run a database backup successfully', async () => {
|
||||
@@ -196,21 +199,21 @@ describe(BackupService.name, () => {
|
||||
expect(mocks.storage.rename).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should fail if pg_dumpall fails', async () => {
|
||||
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
|
||||
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
|
||||
it('should fail if pg_dump fails', async () => {
|
||||
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
|
||||
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
|
||||
});
|
||||
|
||||
it('should not rename file if pgdump fails and gzip succeeds', async () => {
|
||||
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
|
||||
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
|
||||
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
|
||||
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
|
||||
expect(mocks.storage.rename).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should fail if gzip fails', async () => {
|
||||
mocks.process.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
|
||||
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
|
||||
await expect(sut.handleBackupDatabase()).rejects.toThrow('Gzip failed with code 1');
|
||||
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
|
||||
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
|
||||
await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
|
||||
});
|
||||
|
||||
it('should fail if write stream fails', async () => {
|
||||
@@ -226,9 +229,9 @@ describe(BackupService.name, () => {
|
||||
});
|
||||
|
||||
it('should ignore unlink failing and still return failed job status', async () => {
|
||||
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
|
||||
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
|
||||
mocks.storage.unlink.mockRejectedValue(new Error('error'));
|
||||
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
|
||||
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
|
||||
expect(mocks.storage.unlink).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
@@ -242,12 +245,12 @@ describe(BackupService.name, () => {
|
||||
${'17.15.1'} | ${17}
|
||||
${'18.0.0'} | ${18}
|
||||
`(
|
||||
`should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
|
||||
`should use pg_dump $expectedVersion with postgres version $postgresVersion`,
|
||||
async ({ postgresVersion, expectedVersion }) => {
|
||||
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
|
||||
await sut.handleBackupDatabase();
|
||||
expect(mocks.process.spawn).toHaveBeenCalledWith(
|
||||
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dumpall`,
|
||||
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledWith(
|
||||
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
|
||||
expect.any(Array),
|
||||
expect.any(Object),
|
||||
);
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { DateTime } from 'luxon';
|
||||
import path from 'node:path';
|
||||
import semver from 'semver';
|
||||
import { serverVersion } from 'src/constants';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { OnEvent, OnJob } from 'src/decorators';
|
||||
import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
|
||||
import { ArgOf } from 'src/repositories/event.repository';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import {
|
||||
createDatabaseBackup,
|
||||
isFailedDatabaseBackupName,
|
||||
isValidDatabaseRoutineBackupName,
|
||||
UnsupportedPostgresError,
|
||||
} from 'src/utils/database-backups';
|
||||
import { handlePromiseError } from 'src/utils/misc';
|
||||
|
||||
@Injectable()
|
||||
@@ -53,16 +56,11 @@ export class BackupService extends BaseService {
|
||||
|
||||
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
|
||||
const files = await this.storageRepository.readdir(backupsFolder);
|
||||
const failedBackups = files.filter((file) => file.match(/immich-db-backup-.*\.sql\.gz\.tmp$/));
|
||||
const backups = files
|
||||
.filter((file) => {
|
||||
const oldBackupStyle = file.match(/immich-db-backup-\d+\.sql\.gz$/);
|
||||
//immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
|
||||
const newBackupStyle = file.match(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
|
||||
return oldBackupStyle || newBackupStyle;
|
||||
})
|
||||
.filter((filename) => isValidDatabaseRoutineBackupName(filename))
|
||||
.toSorted()
|
||||
.toReversed();
|
||||
const failedBackups = files.filter((filename) => isFailedDatabaseBackupName(filename));
|
||||
|
||||
const toDelete = backups.slice(config.keepLastAmount);
|
||||
toDelete.push(...failedBackups);
|
||||
@@ -75,123 +73,27 @@ export class BackupService extends BaseService {
|
||||
|
||||
@OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
|
||||
async handleBackupDatabase(): Promise<JobStatus> {
|
||||
this.logger.debug(`Database Backup Started`);
|
||||
const { database } = this.configRepository.getEnv();
|
||||
const config = database.config;
|
||||
|
||||
const isUrlConnection = config.connectionType === 'url';
|
||||
|
||||
let connectionUrl: string = isUrlConnection ? config.url : '';
|
||||
if (URL.canParse(connectionUrl)) {
|
||||
// remove known bad url parameters for pg_dumpall
|
||||
const url = new URL(connectionUrl);
|
||||
url.searchParams.delete('uselibpqcompat');
|
||||
connectionUrl = url.toString();
|
||||
}
|
||||
|
||||
const databaseParams = isUrlConnection
|
||||
? ['--dbname', connectionUrl]
|
||||
: [
|
||||
'--username',
|
||||
config.username,
|
||||
'--host',
|
||||
config.host,
|
||||
'--port',
|
||||
`${config.port}`,
|
||||
'--database',
|
||||
config.database,
|
||||
];
|
||||
|
||||
databaseParams.push('--clean', '--if-exists');
|
||||
const databaseVersion = await this.databaseRepository.getPostgresVersion();
|
||||
const backupFilePath = path.join(
|
||||
StorageCore.getBaseFolder(StorageFolder.Backups),
|
||||
`immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
|
||||
);
|
||||
const databaseSemver = semver.coerce(databaseVersion);
|
||||
const databaseMajorVersion = databaseSemver?.major;
|
||||
|
||||
if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
|
||||
this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
|
||||
return JobStatus.Failed;
|
||||
}
|
||||
|
||||
this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);
|
||||
|
||||
try {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const pgdump = this.processRepository.spawn(
|
||||
`/usr/lib/postgresql/${databaseMajorVersion}/bin/pg_dumpall`,
|
||||
databaseParams,
|
||||
{
|
||||
env: {
|
||||
PATH: process.env.PATH,
|
||||
PGPASSWORD: isUrlConnection ? new URL(connectionUrl).password : config.password,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// NOTE: `--rsyncable` is only supported in GNU gzip
|
||||
const gzip = this.processRepository.spawn(`gzip`, ['--rsyncable']);
|
||||
pgdump.stdout.pipe(gzip.stdin);
|
||||
|
||||
const fileStream = this.storageRepository.createWriteStream(backupFilePath);
|
||||
|
||||
gzip.stdout.pipe(fileStream);
|
||||
|
||||
pgdump.on('error', (err) => {
|
||||
this.logger.error(`Backup failed with error: ${err}`);
|
||||
reject(err);
|
||||
});
|
||||
|
||||
gzip.on('error', (err) => {
|
||||
this.logger.error(`Gzip failed with error: ${err}`);
|
||||
reject(err);
|
||||
});
|
||||
|
||||
let pgdumpLogs = '';
|
||||
let gzipLogs = '';
|
||||
|
||||
pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
|
||||
gzip.stderr.on('data', (data) => (gzipLogs += data));
|
||||
|
||||
pgdump.on('exit', (code) => {
|
||||
if (code !== 0) {
|
||||
this.logger.error(`Backup failed with code ${code}`);
|
||||
reject(`Backup failed with code ${code}`);
|
||||
this.logger.error(pgdumpLogs);
|
||||
return;
|
||||
}
|
||||
if (pgdumpLogs) {
|
||||
this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
|
||||
}
|
||||
});
|
||||
|
||||
gzip.on('exit', (code) => {
|
||||
if (code !== 0) {
|
||||
this.logger.error(`Gzip failed with code ${code}`);
|
||||
reject(`Gzip failed with code ${code}`);
|
||||
this.logger.error(gzipLogs);
|
||||
return;
|
||||
}
|
||||
if (pgdump.exitCode !== 0) {
|
||||
this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
|
||||
return;
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
|
||||
await createDatabaseBackup(this.backupRepos);
|
||||
} catch (error) {
|
||||
this.logger.error(`Database Backup Failure: ${error}`);
|
||||
await this.storageRepository
|
||||
.unlink(backupFilePath)
|
||||
.catch((error) => this.logger.error(`Failed to delete failed backup file: ${error}`));
|
||||
if (error instanceof UnsupportedPostgresError) {
|
||||
return JobStatus.Failed;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
this.logger.log(`Database Backup Success`);
|
||||
await this.cleanupDatabaseBackups();
|
||||
return JobStatus.Success;
|
||||
}
|
||||
|
||||
private get backupRepos() {
|
||||
return {
|
||||
logger: this.logger,
|
||||
storage: this.storageRepository,
|
||||
config: this.configRepository,
|
||||
process: this.processRepository,
|
||||
database: this.databaseRepository,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
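Both BackupService and MaintenanceWorkerService now expose the same backupRepos getter and delegate to the helpers extracted into src/utils/database-backups, so the routine backup job and the maintenance restore share one implementation. A sketch of the call shape; the repos object is assumed to be wired up by dependency injection as in the getters above:

import { createDatabaseBackup, listDatabaseBackups } from 'src/utils/database-backups';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';

declare const repos: {
  logger: LoggingRepository;
  storage: StorageRepository;
  config: ConfigRepository;
  process: ProcessRepository;
  database: DatabaseRepository;
};

await createDatabaseBackup(repos); // what handleBackupDatabase now delegates to
const backups = await listDatabaseBackups(repos); // newest-first { filename, filesize } entries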
@@ -1,5 +1,5 @@
|
||||
import { jwtVerify } from 'jose';
|
||||
import { SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
|
||||
import { CliService } from 'src/services/cli.service';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { newTestService, ServiceMocks } from 'test/utils';
|
||||
@@ -95,7 +95,14 @@ describe(CliService.name, () => {
|
||||
});
|
||||
|
||||
it('should disable maintenance mode', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
await expect(sut.disableMaintenanceMode()).resolves.toEqual({
|
||||
alreadyDisabled: false,
|
||||
});
|
||||
@@ -109,7 +116,14 @@ describe(CliService.name, () => {
|
||||
|
||||
describe('enableMaintenanceMode', () => {
|
||||
it('should not do anything if in maintenance mode', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
await expect(sut.enableMaintenanceMode()).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
alreadyEnabled: true,
|
||||
@@ -133,13 +147,22 @@ describe(CliService.name, () => {
|
||||
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: true,
|
||||
secret: expect.stringMatching(/^\w{128}$/),
|
||||
action: {
|
||||
action: 'start',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;
|
||||
|
||||
it('should return a valid login URL', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
const result = await sut.enableMaintenanceMode();
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { isAbsolute } from 'node:path';
|
||||
import { SALT_ROUNDS } from 'src/constants';
|
||||
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
|
||||
import { UserAdminResponseDto, mapUserAdmin } from 'src/dtos/user.dto';
|
||||
import { SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import { createMaintenanceLoginUrl, generateMaintenanceSecret } from 'src/utils/maintenance';
|
||||
import { getExternalDomain } from 'src/utils/misc';
|
||||
@@ -86,6 +86,9 @@ export class CliService extends BaseService {
|
||||
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: true,
|
||||
secret,
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
await this.appRepository.sendOneShotAppRestart({
|
||||
|
||||
83
server/src/services/database-backup.service.spec.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import { BadRequestException } from '@nestjs/common';
|
||||
import { DateTime } from 'luxon';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { StorageFolder } from 'src/enum';
|
||||
import { DatabaseBackupService } from 'src/services/database-backup.service';
|
||||
import { MaintenanceService } from 'src/services/maintenance.service';
|
||||
import { newTestService, ServiceMocks } from 'test/utils';
|
||||
|
||||
describe(MaintenanceService.name, () => {
|
||||
let sut: DatabaseBackupService;
|
||||
let mocks: ServiceMocks;
|
||||
|
||||
beforeEach(() => {
|
||||
({ sut, mocks } = newTestService(DatabaseBackupService));
|
||||
});
|
||||
|
||||
it('should work', () => {
|
||||
expect(sut).toBeDefined();
|
||||
});
|
||||
|
||||
describe('listBackups', () => {
|
||||
it('should give us all backups', async () => {
|
||||
mocks.storage.readdir.mockResolvedValue([
|
||||
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
|
||||
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
|
||||
'immich-db-backup-1753789649000.sql.gz',
|
||||
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
|
||||
]);
|
||||
mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
|
||||
|
||||
await expect(sut.listBackups()).resolves.toMatchObject({
|
||||
backups: [
|
||||
{ filename: 'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
|
||||
{ filename: 'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
|
||||
{ filename: 'immich-db-backup-1753789649000.sql.gz', filesize: 1024 },
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteBackup', () => {
|
||||
it('should reject invalid file names', async () => {
|
||||
await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
|
||||
new BadRequestException('Invalid backup name!'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should unlink the target file', async () => {
|
||||
await sut.deleteBackup(['filename.sql']);
|
||||
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.storage.unlink).toHaveBeenCalledWith(
|
||||
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('uploadBackup', () => {
|
||||
it('should reject invalid file names', async () => {
|
||||
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
|
||||
new BadRequestException('Invalid backup name!'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should write file', async () => {
|
||||
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
|
||||
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
|
||||
});
|
||||
});
|
||||
|
||||
describe('downloadBackup', () => {
|
||||
it('should reject invalid file names', () => {
|
||||
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
|
||||
});
|
||||
|
||||
it('should get backup path', () => {
|
||||
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
|
||||
expect.objectContaining({
|
||||
path: '/data/backups/hello.sql.gz',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
43
server/src/services/database-backup.service.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import {
|
||||
deleteDatabaseBackup,
|
||||
downloadDatabaseBackup,
|
||||
listDatabaseBackups,
|
||||
uploadDatabaseBackup,
|
||||
} from 'src/utils/database-backups';
|
||||
import { ImmichFileResponse } from 'src/utils/file';
|
||||
|
||||
/**
|
||||
* This service is available outside of maintenance mode to manage database backups
|
||||
*/
|
||||
@Injectable()
|
||||
export class DatabaseBackupService extends BaseService {
|
||||
async listBackups(): Promise<DatabaseBackupListResponseDto> {
|
||||
const backups = await listDatabaseBackups(this.backupRepos);
|
||||
return { backups };
|
||||
}
|
||||
|
||||
deleteBackup(files: string[]): Promise<void> {
|
||||
return deleteDatabaseBackup(this.backupRepos, files);
|
||||
}
|
||||
|
||||
async uploadBackup(file: Express.Multer.File): Promise<void> {
|
||||
return uploadDatabaseBackup(this.backupRepos, file);
|
||||
}
|
||||
|
||||
downloadBackup(fileName: string): ImmichFileResponse {
|
||||
return downloadDatabaseBackup(fileName);
|
||||
}
|
||||
|
||||
private get backupRepos() {
|
||||
return {
|
||||
logger: this.logger,
|
||||
storage: this.storageRepository,
|
||||
config: this.configRepository,
|
||||
process: this.processRepository,
|
||||
database: this.databaseRepository,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -9,6 +9,7 @@ import { AuthAdminService } from 'src/services/auth-admin.service';
|
||||
import { AuthService } from 'src/services/auth.service';
|
||||
import { BackupService } from 'src/services/backup.service';
|
||||
import { CliService } from 'src/services/cli.service';
|
||||
import { DatabaseBackupService } from 'src/services/database-backup.service';
|
||||
import { DatabaseService } from 'src/services/database.service';
|
||||
import { DownloadService } from 'src/services/download.service';
|
||||
import { DuplicateService } from 'src/services/duplicate.service';
|
||||
@@ -59,6 +60,7 @@ export const services = [
|
||||
AuthAdminService,
|
||||
BackupService,
|
||||
CliService,
|
||||
DatabaseBackupService,
|
||||
DatabaseService,
|
||||
DownloadService,
|
||||
DuplicateService,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceService } from 'src/services/maintenance.service';
|
||||
import { newTestService, ServiceMocks } from 'test/utils';
|
||||
|
||||
@@ -36,28 +36,96 @@ describe(MaintenanceService.name, () => {
|
||||
});
|
||||
|
||||
it('should return true if enabled', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: '' });
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: '',
|
||||
action: { action: MaintenanceAction.Start },
|
||||
});
|
||||
|
||||
await expect(sut.getMaintenanceMode()).resolves.toEqual({
|
||||
isMaintenanceMode: true,
|
||||
secret: '',
|
||||
action: {
|
||||
action: 'start',
|
||||
},
|
||||
});
|
||||
|
||||
expect(mocks.systemMetadata.get).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('integrityCheck', () => {
|
||||
it('generate integrity report', async () => {
|
||||
mocks.storage.readdir.mockResolvedValue(['.immich', 'file1', 'file2']);
|
||||
mocks.storage.readFile.mockResolvedValue(undefined as never);
|
||||
mocks.storage.overwriteFile.mockRejectedValue(undefined as never);
|
||||
|
||||
await expect(sut.detectPriorInstall()).resolves.toMatchInlineSnapshot(`
|
||||
{
|
||||
"storage": [
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "encoded-video",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "library",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "upload",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "profile",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "thumbs",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "backups",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('startMaintenance', () => {
|
||||
it('should set maintenance mode and return a secret', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: false });
|
||||
|
||||
await expect(sut.startMaintenance('admin')).resolves.toMatchObject({
|
||||
await expect(
|
||||
sut.startMaintenance(
|
||||
{
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
'admin',
|
||||
),
|
||||
).resolves.toMatchObject({
|
||||
jwt: expect.any(String),
|
||||
});
|
||||
|
||||
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: true,
|
||||
secret: expect.stringMatching(/^\w{128}$/),
|
||||
action: {
|
||||
action: 'start',
|
||||
},
|
||||
});
|
||||
|
||||
expect(mocks.event.emit).toHaveBeenCalledWith('AppRestart', {
|
||||
@@ -78,7 +146,13 @@ describe(MaintenanceService.name, () => {
|
||||
});
|
||||
|
||||
it('should generate a login url with JWT', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
await expect(
|
||||
sut.createLoginUrl({
|
||||
|
||||
@@ -1,11 +1,21 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { BadRequestException, Injectable } from '@nestjs/common';
|
||||
import { OnEvent } from 'src/decorators';
|
||||
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
|
||||
import { SystemMetadataKey } from 'src/enum';
|
||||
import {
|
||||
MaintenanceAuthDto,
|
||||
MaintenanceDetectInstallResponseDto,
|
||||
MaintenanceStatusResponseDto,
|
||||
SetMaintenanceModeDto,
|
||||
} from 'src/dtos/maintenance.dto';
|
||||
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
|
||||
import { ArgOf } from 'src/repositories/event.repository';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import { MaintenanceModeState } from 'src/types';
|
||||
import { createMaintenanceLoginUrl, generateMaintenanceSecret, signMaintenanceJwt } from 'src/utils/maintenance';
|
||||
import {
|
||||
createMaintenanceLoginUrl,
|
||||
detectPriorInstall,
|
||||
generateMaintenanceSecret,
|
||||
signMaintenanceJwt,
|
||||
} from 'src/utils/maintenance';
|
||||
import { getExternalDomain } from 'src/utils/misc';
|
||||
|
||||
/**
|
||||
@@ -19,9 +29,25 @@ export class MaintenanceService extends BaseService {
|
||||
.then((state) => state ?? { isMaintenanceMode: false });
|
||||
}
|
||||
|
||||
async startMaintenance(username: string): Promise<{ jwt: string }> {
|
||||
getMaintenanceStatus(): MaintenanceStatusResponseDto {
|
||||
return {
|
||||
active: false,
|
||||
action: MaintenanceAction.End,
|
||||
};
|
||||
}
|
||||
|
||||
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
|
||||
return detectPriorInstall(this.storageRepository);
|
||||
}
|
||||
|
||||
async startMaintenance(action: SetMaintenanceModeDto, username: string): Promise<{ jwt: string }> {
|
||||
const secret = generateMaintenanceSecret();
|
||||
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, { isMaintenanceMode: true, secret });
|
||||
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: true,
|
||||
secret,
|
||||
action,
|
||||
});
|
||||
|
||||
await this.eventRepository.emit('AppRestart', { isMaintenanceMode: true });
|
||||
|
||||
return {
|
||||
@@ -31,6 +57,20 @@ export class MaintenanceService extends BaseService {
|
||||
};
|
||||
}
|
||||
|
||||
async startRestoreFlow(): Promise<{ jwt: string }> {
|
||||
const adminUser = await this.userRepository.getAdmin();
|
||||
if (adminUser) {
|
||||
throw new BadRequestException('The server already has an admin');
|
||||
}
|
||||
|
||||
return this.startMaintenance(
|
||||
{
|
||||
action: MaintenanceAction.SelectDatabaseRestore,
|
||||
},
|
||||
'admin',
|
||||
);
|
||||
}
|
||||
|
||||
@OnEvent({ name: 'AppRestart', server: true })
|
||||
onRestart(event: ArgOf<'AppRestart'>, ack?: (ok: 'ok') => void): void {
|
||||
this.logger.log(`Restarting due to event... ${JSON.stringify(event)}`);
|
||||
|
||||
@@ -4,6 +4,7 @@ import { Asset, AssetFile } from 'src/database';
|
||||
import { UploadFieldName } from 'src/dtos/asset-media.dto';
|
||||
import { AuthDto } from 'src/dtos/auth.dto';
|
||||
import { AssetEditActionItem } from 'src/dtos/editing.dto';
|
||||
import { SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
|
||||
import {
|
||||
AssetOrder,
|
||||
AssetType,
|
||||
@@ -481,7 +482,9 @@ export interface MemoryData {
|
||||
|
||||
export type VersionCheckMetadata = { checkedAt: string; releaseVersion: string };
|
||||
export type SystemFlags = { mountChecks: Record<StorageFolder, boolean> };
|
||||
export type MaintenanceModeState = { isMaintenanceMode: true; secret: string } | { isMaintenanceMode: false };
|
||||
export type MaintenanceModeState =
|
||||
| { isMaintenanceMode: true; secret: string; action: SetMaintenanceModeDto }
|
||||
| { isMaintenanceMode: false };
|
||||
export type MemoriesState = {
|
||||
/** memories have already been created through this date */
|
||||
lastOnThisDayDate: string;
|
||||
|
||||
494
server/src/utils/database-backups.ts
Normal file
@@ -0,0 +1,494 @@
|
||||
import { BadRequestException } from '@nestjs/common';
|
||||
import { debounce } from 'lodash';
|
||||
import { DateTime } from 'luxon';
|
||||
import path, { basename, join } from 'node:path';
|
||||
import { PassThrough, Readable, Writable } from 'node:stream';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import semver from 'semver';
|
||||
import { serverVersion } from 'src/constants';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { CacheControl, StorageFolder } from 'src/enum';
|
||||
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
|
||||
export function isValidDatabaseBackupName(filename: string) {
|
||||
return filename.match(/^[\d\w-.]+\.sql(?:\.gz)?$/);
|
||||
}
|
||||
|
||||
export function isValidDatabaseRoutineBackupName(filename: string) {
|
||||
const oldBackupStyle = filename.match(/^immich-db-backup-\d+\.sql\.gz$/);
|
||||
//immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
|
||||
const newBackupStyle = filename.match(/^immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
|
||||
return oldBackupStyle || newBackupStyle;
|
||||
}
|
||||
|
||||
export function isFailedDatabaseBackupName(filename: string) {
|
||||
return filename.match(/^immich-db-backup-.*\.sql\.gz\.tmp$/);
|
||||
}
|
||||
|
||||
export function findVersion(filename: string) {
|
||||
return /-v(.*)-/.exec(filename)?.[1];
|
||||
}
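For reference, a minimal sketch of what these filename helpers accept and return. The sample filenames are illustrative only (the timestamped one mirrors the comment above), and the snippet assumes the exported helpers from this file:

```ts
import {
  findVersion,
  isFailedDatabaseBackupName,
  isValidDatabaseBackupName,
  isValidDatabaseRoutineBackupName,
} from 'src/utils/database-backups';

// Uploaded or hand-named dumps only need a .sql / .sql.gz suffix and safe characters.
isValidDatabaseBackupName('uploaded-my-dump.sql.gz'); // truthy
isValidDatabaseBackupName('invalid backup'); // null — spaces are rejected

// Routine backups match either the old epoch-based name or the new timestamped one.
isValidDatabaseRoutineBackupName('immich-db-backup-1753789649000.sql.gz'); // truthy (old style)
isValidDatabaseRoutineBackupName('immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz'); // truthy (new style)

// Interrupted dumps keep their .tmp suffix and count as failed.
isFailedDatabaseBackupName('immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz.tmp'); // truthy

// The server version is recovered from the "-v...-" segment of the new-style name.
findVersion('immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz'); // '1.136.0'
```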
|
||||
|
||||
type BackupRepos = {
|
||||
logger: LoggingRepository;
|
||||
storage: StorageRepository;
|
||||
config: ConfigRepository;
|
||||
process: ProcessRepository;
|
||||
database: DatabaseRepository;
|
||||
health: MaintenanceHealthRepository;
|
||||
};
|
||||
|
||||
export class UnsupportedPostgresError extends Error {
|
||||
constructor(databaseVersion: string) {
|
||||
super(`Unsupported PostgreSQL version: ${databaseVersion}`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function buildPostgresLaunchArguments(
|
||||
{ logger, config, database }: Pick<BackupRepos, 'logger' | 'config' | 'database'>,
|
||||
bin: 'pg_dump' | 'pg_dumpall' | 'psql',
|
||||
options: {
|
||||
singleTransaction?: boolean;
|
||||
username?: string;
|
||||
} = {},
|
||||
): Promise<{
|
||||
bin: string;
|
||||
args: string[];
|
||||
databasePassword: string;
|
||||
databaseVersion: string;
|
||||
databaseMajorVersion?: number;
|
||||
}> {
|
||||
const {
|
||||
database: { config: databaseConfig },
|
||||
} = config.getEnv();
|
||||
const isUrlConnection = databaseConfig.connectionType === 'url';
|
||||
|
||||
const databaseVersion = await database.getPostgresVersion();
|
||||
const databaseSemver = semver.coerce(databaseVersion);
|
||||
const databaseMajorVersion = databaseSemver?.major;
|
||||
|
||||
const args: string[] = [];
|
||||
|
||||
if (isUrlConnection) {
|
||||
if (bin !== 'pg_dump') {
|
||||
args.push('--dbname');
|
||||
}
|
||||
|
||||
let url = databaseConfig.url;
|
||||
if (URL.canParse(databaseConfig.url)) {
|
||||
const parsedUrl = new URL(databaseConfig.url);
|
||||
// remove known bad parameters
|
||||
parsedUrl.searchParams.delete('uselibpqcompat');
|
||||
|
||||
if (options.username) {
|
||||
parsedUrl.username = options.username;
|
||||
}
|
||||
|
||||
url = parsedUrl.toString();
|
||||
}
|
||||
|
||||
args.push(url);
|
||||
} else {
|
||||
args.push(
|
||||
'--username',
|
||||
options.username ?? databaseConfig.username,
|
||||
'--host',
|
||||
databaseConfig.host,
|
||||
'--port',
|
||||
databaseConfig.port.toString(),
|
||||
);
|
||||
|
||||
switch (bin) {
|
||||
case 'pg_dumpall': {
|
||||
args.push('--database');
|
||||
break;
|
||||
}
|
||||
case 'psql': {
|
||||
args.push('--dbname');
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
args.push(databaseConfig.database);
|
||||
}
|
||||
|
||||
switch (bin) {
|
||||
case 'pg_dump':
|
||||
case 'pg_dumpall': {
|
||||
args.push('--clean', '--if-exists');
|
||||
break;
|
||||
}
|
||||
case 'psql': {
|
||||
if (options.singleTransaction) {
|
||||
args.push(
|
||||
// don't commit any transaction on failure
|
||||
'--single-transaction',
|
||||
// exit with non-zero code on error
|
||||
'--set',
|
||||
'ON_ERROR_STOP=on',
|
||||
);
|
||||
}
|
||||
|
||||
args.push(
|
||||
// used for progress monitoring
|
||||
'--echo-all',
|
||||
'--output=/dev/null',
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
|
||||
logger.error(`Database Restore Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
|
||||
throw new UnsupportedPostgresError(databaseVersion);
|
||||
}
|
||||
|
||||
return {
|
||||
bin: `/usr/lib/postgresql/${databaseMajorVersion}/bin/${bin}`,
|
||||
args,
|
||||
databasePassword: isUrlConnection ? new URL(databaseConfig.url).password : databaseConfig.password,
|
||||
databaseVersion,
|
||||
databaseMajorVersion,
|
||||
};
|
||||
}
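As a rough illustration, the returned value for a hypothetical host/port configuration with `bin: 'psql'` and `singleTransaction: true` on PostgreSQL 14 would look something like the sketch below; every concrete value is invented and only the argument order follows the branches above:

```ts
// Hypothetical launch description; values are placeholders, not real configuration.
const exampleLaunch = {
  bin: '/usr/lib/postgresql/14/bin/psql',
  args: [
    '--username', 'postgres',
    '--host', 'database',
    '--port', '5432',
    '--dbname', 'immich',
    '--single-transaction',      // nothing is committed if the restore fails
    '--set', 'ON_ERROR_STOP=on', // exit non-zero on the first error
    '--echo-all',                // echoed statements drive progress monitoring
    '--output=/dev/null',
  ],
  databasePassword: 'postgres', // callers export this as PGPASSWORD when spawning
  databaseVersion: '14.17',
  databaseMajorVersion: 14,
};
```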
|
||||
|
||||
export async function createDatabaseBackup(
|
||||
{ logger, storage, process: processRepository, ...pgRepos }: Omit<BackupRepos, 'health'>,
|
||||
filenamePrefix: string = '',
|
||||
): Promise<string> {
|
||||
logger.debug(`Database Backup Started`);
|
||||
|
||||
const { bin, args, databasePassword, databaseVersion, databaseMajorVersion } = await buildPostgresLaunchArguments(
|
||||
{ logger, ...pgRepos },
|
||||
'pg_dump',
|
||||
);
|
||||
|
||||
logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);
|
||||
|
||||
const filename = `${filenamePrefix}immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz`;
|
||||
const backupFilePath = join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
|
||||
const temporaryFilePath = `${backupFilePath}.tmp`;
|
||||
|
||||
try {
|
||||
const pgdump = processRepository.spawnDuplexStream(bin, args, {
|
||||
env: {
|
||||
PATH: process.env.PATH,
|
||||
PGPASSWORD: databasePassword,
|
||||
},
|
||||
});
|
||||
|
||||
const gzip = processRepository.spawnDuplexStream('gzip', ['--rsyncable']);
|
||||
const fileStream = storage.createWriteStream(temporaryFilePath);
|
||||
|
||||
await pipeline(pgdump, gzip, fileStream);
|
||||
await storage.rename(temporaryFilePath, backupFilePath);
|
||||
} catch (error) {
|
||||
logger.error(`Database Backup Failure: ${error}`);
|
||||
await storage
|
||||
.unlink(temporaryFilePath)
|
||||
.catch((error) => logger.error(`Failed to delete failed backup file: ${error}`));
|
||||
throw error;
|
||||
}
|
||||
|
||||
logger.log(`Database Backup Success`);
|
||||
return backupFilePath;
|
||||
}
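For orientation, the returned path looks roughly like the following; the base folder matches the test expectations elsewhere in this change, while the timestamp and versions are made up:

```ts
// Hypothetical result of createDatabaseBackup(backupRepos, 'restore-point-'):
const examplePath = '/data/backups/restore-point-immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz';
// While `pg_dump | gzip` is still writing, the same path carries a '.tmp' suffix;
// it is renamed to its final name only after the pipeline completes successfully.
```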
|
||||
|
||||
const SQL_DROP_CONNECTIONS = `
|
||||
-- drop all other database connections
|
||||
SELECT pg_terminate_backend(pid)
|
||||
FROM pg_stat_activity
|
||||
WHERE datname = current_database()
|
||||
AND pid <> pg_backend_pid();
|
||||
`;
|
||||
|
||||
const SQL_RESET_SCHEMA = `
|
||||
-- re-create the default schema
|
||||
DROP SCHEMA public CASCADE;
|
||||
CREATE SCHEMA public;
|
||||
|
||||
-- restore access to schema
|
||||
GRANT ALL ON SCHEMA public TO postgres;
|
||||
GRANT ALL ON SCHEMA public TO public;
|
||||
`;
|
||||
|
||||
async function* sql(inputStream: Readable, isPgClusterDump: boolean) {
|
||||
yield SQL_DROP_CONNECTIONS;
|
||||
yield isPgClusterDump
|
||||
? String.raw`
|
||||
\c postgres
|
||||
`
|
||||
: SQL_RESET_SCHEMA;
|
||||
|
||||
for await (const chunk of inputStream) {
|
||||
yield chunk;
|
||||
}
|
||||
}
|
||||
|
||||
async function* sqlRollback(inputStream: Readable, isPgClusterDump: boolean) {
|
||||
yield SQL_DROP_CONNECTIONS;
|
||||
|
||||
if (isPgClusterDump) {
|
||||
yield String.raw`
|
||||
-- try to create database
|
||||
-- may fail but script will continue running
|
||||
CREATE DATABASE immich;
|
||||
|
||||
-- switch to database / newly created database
|
||||
\c immich
|
||||
`;
|
||||
}
|
||||
|
||||
yield SQL_RESET_SCHEMA;
|
||||
|
||||
for await (const chunk of inputStream) {
|
||||
yield chunk;
|
||||
}
|
||||
}
|
||||
|
||||
export async function restoreDatabaseBackup(
|
||||
{ logger, storage, process: processRepository, database: databaseRepository, health, ...pgRepos }: BackupRepos,
|
||||
filename: string,
|
||||
progressCb?: (action: 'backup' | 'restore' | 'migrations' | 'rollback', progress: number) => void,
|
||||
): Promise<void> {
|
||||
logger.debug(`Database Restore Started`);
|
||||
|
||||
let complete = false;
|
||||
try {
|
||||
if (!isValidDatabaseBackupName(filename)) {
|
||||
throw new Error('Invalid backup file format!');
|
||||
}
|
||||
|
||||
const backupFilePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
|
||||
await storage.stat(backupFilePath); // => check file exists
|
||||
|
||||
let isPgClusterDump = false;
|
||||
const version = findVersion(filename);
|
||||
if (version && semver.satisfies(version, '<= 2.4')) {
|
||||
isPgClusterDump = true;
|
||||
}
|
||||
|
||||
const { bin, args, databasePassword, databaseMajorVersion } = await buildPostgresLaunchArguments(
|
||||
{ logger, database: databaseRepository, ...pgRepos },
|
||||
'psql',
|
||||
{
|
||||
singleTransaction: !isPgClusterDump,
|
||||
username: isPgClusterDump ? 'postgres' : undefined,
|
||||
},
|
||||
);
|
||||
|
||||
progressCb?.('backup', 0.05);
|
||||
|
||||
const restorePointFilePath = await createDatabaseBackup(
|
||||
{ logger, storage, process: processRepository, database: databaseRepository, ...pgRepos },
|
||||
'restore-point-',
|
||||
);
|
||||
|
||||
logger.log(`Database Restore Starting. Database Version: ${databaseMajorVersion}`);
|
||||
|
||||
let inputStream: Readable;
|
||||
if (backupFilePath.endsWith('.gz')) {
|
||||
const fileStream = storage.createPlainReadStream(backupFilePath);
|
||||
const gunzip = storage.createGunzip();
|
||||
fileStream.pipe(gunzip);
|
||||
inputStream = gunzip;
|
||||
} else {
|
||||
inputStream = storage.createPlainReadStream(backupFilePath);
|
||||
}
|
||||
|
||||
const sqlStream = Readable.from(sql(inputStream, isPgClusterDump));
|
||||
const psql = processRepository.spawnDuplexStream(bin, args, {
|
||||
env: {
|
||||
PATH: process.env.PATH,
|
||||
PGPASSWORD: databasePassword,
|
||||
},
|
||||
});
|
||||
|
||||
const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
|
||||
if (complete) {
|
||||
return;
|
||||
}
|
||||
|
||||
logger.log(`Restore progress ~ ${(progress * 100).toFixed(2)}%`);
|
||||
progressCb?.('restore', progress);
|
||||
});
|
||||
|
||||
await pipeline(sqlStream, progressSource, psql, progressSink);
|
||||
|
||||
try {
|
||||
progressCb?.('migrations', 0.9);
|
||||
await databaseRepository.runMigrations();
|
||||
await health.checkApiHealth();
|
||||
} catch (error) {
|
||||
progressCb?.('rollback', 0);
|
||||
|
||||
const fileStream = storage.createPlainReadStream(restorePointFilePath);
|
||||
const gunzip = storage.createGunzip();
|
||||
fileStream.pipe(gunzip);
|
||||
inputStream = gunzip;
|
||||
|
||||
const sqlStream = Readable.from(sqlRollback(inputStream, isPgClusterDump));
|
||||
const psql = processRepository.spawnDuplexStream(bin, args, {
|
||||
env: {
|
||||
PATH: process.env.PATH,
|
||||
PGPASSWORD: databasePassword,
|
||||
},
|
||||
});
|
||||
|
||||
const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
|
||||
if (complete) {
|
||||
return;
|
||||
}
|
||||
|
||||
logger.log(`Rollback progress ~ ${(progress * 100).toFixed(2)}%`);
|
||||
progressCb?.('rollback', progress);
|
||||
});
|
||||
|
||||
await pipeline(sqlStream, progressSource, psql, progressSink);
|
||||
|
||||
throw error;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`Database Restore Failure: ${error}`);
|
||||
throw error;
|
||||
} finally {
|
||||
complete = true;
|
||||
}
|
||||
|
||||
logger.log(`Database Restore Success`);
|
||||
}
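A hedged usage sketch: `backupRepos` stands in for the repository bundle (logger, storage, config, process, database, health) that the calling service is assumed to assemble, similar to the `backupRepos` getters above:

```ts
// Sketch only — `backupRepos` is assumed to be provided by the calling service.
await restoreDatabaseBackup(
  backupRepos,
  'immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz',
  (action, progress) => {
    // action is 'backup' | 'restore' | 'migrations' | 'rollback'
    console.log(`${action}: ${(progress * 100).toFixed(0)}%`);
  },
);
```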
|
||||
|
||||
export async function deleteDatabaseBackup({ storage }: Pick<BackupRepos, 'storage'>, files: string[]): Promise<void> {
|
||||
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
|
||||
|
||||
if (files.some((filename) => !isValidDatabaseBackupName(filename))) {
|
||||
throw new BadRequestException('Invalid backup name!');
|
||||
}
|
||||
|
||||
await Promise.all(files.map((filename) => storage.unlink(path.join(backupsFolder, filename))));
|
||||
}
|
||||
|
||||
export async function listDatabaseBackups({
|
||||
storage,
|
||||
}: Pick<BackupRepos, 'storage'>): Promise<{ filename: string; filesize: number }[]> {
|
||||
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
|
||||
const files = await storage.readdir(backupsFolder);
|
||||
|
||||
const validFiles = files
|
||||
.filter((fn) => isValidDatabaseBackupName(fn))
|
||||
.toSorted((a, b) => (a.startsWith('uploaded-') === b.startsWith('uploaded-') ? a.localeCompare(b) : 1))
|
||||
.toReversed();
|
||||
|
||||
const backups = await Promise.all(
|
||||
validFiles.map(async (filename) => {
|
||||
const stats = await storage.stat(path.join(backupsFolder, filename));
|
||||
return { filename, filesize: stats.size };
|
||||
}),
|
||||
);
|
||||
|
||||
return backups;
|
||||
}
|
||||
|
||||
export async function uploadDatabaseBackup(
|
||||
{ storage }: Pick<BackupRepos, 'storage'>,
|
||||
file: Express.Multer.File,
|
||||
): Promise<void> {
|
||||
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
|
||||
const fn = basename(file.originalname);
|
||||
if (!isValidDatabaseBackupName(fn)) {
|
||||
throw new BadRequestException('Invalid backup name!');
|
||||
}
|
||||
|
||||
const path = join(backupsFolder, `uploaded-${fn}`);
|
||||
await storage.createOrOverwriteFile(path, file.buffer);
|
||||
}
|
||||
|
||||
export function downloadDatabaseBackup(fileName: string) {
|
||||
if (!isValidDatabaseBackupName(fileName)) {
|
||||
throw new BadRequestException('Invalid backup name!');
|
||||
}
|
||||
|
||||
const path = join(StorageCore.getBaseFolder(StorageFolder.Backups), fileName);
|
||||
|
||||
return {
|
||||
path,
|
||||
fileName,
|
||||
cacheControl: CacheControl.PrivateWithoutCache,
|
||||
contentType: fileName.endsWith('.gz') ? 'application/gzip' : 'application/sql',
|
||||
};
|
||||
}
|
||||
|
||||
function createSqlProgressStreams(cb: (progress: number) => void) {
|
||||
const STDIN_START_MARKER = new TextEncoder().encode('FROM stdin');
|
||||
const STDIN_END_MARKER = new TextEncoder().encode(String.raw`\.`);
|
||||
|
||||
let readingStdin = false;
|
||||
let sequenceIdx = 0;
|
||||
|
||||
let linesSent = 0;
|
||||
let linesProcessed = 0;
|
||||
|
||||
const startedAt = +Date.now();
|
||||
const cbDebounced = debounce(
|
||||
() => {
|
||||
const progress = source.writableEnded
|
||||
? Math.min(1, linesProcessed / linesSent)
|
||||
: // progress simulation while we're in an indeterminate state
|
||||
Math.min(0.3, 0.1 + (Date.now() - startedAt) / 1e4);
|
||||
cb(progress);
|
||||
},
|
||||
100,
|
||||
{
|
||||
maxWait: 100,
|
||||
},
|
||||
);
|
||||
|
||||
let lastByte = -1;
|
||||
const source = new PassThrough({
|
||||
transform(chunk, _encoding, callback) {
|
||||
for (const byte of chunk) {
|
||||
if (!readingStdin && byte === 10 && lastByte !== 10) {
|
||||
linesSent += 1;
|
||||
}
|
||||
|
||||
lastByte = byte;
|
||||
|
||||
const sequence = readingStdin ? STDIN_END_MARKER : STDIN_START_MARKER;
|
||||
if (sequence[sequenceIdx] === byte) {
|
||||
sequenceIdx += 1;
|
||||
|
||||
if (sequence.length === sequenceIdx) {
|
||||
sequenceIdx = 0;
|
||||
readingStdin = !readingStdin;
|
||||
}
|
||||
} else {
|
||||
sequenceIdx = 0;
|
||||
}
|
||||
}
|
||||
|
||||
cbDebounced();
|
||||
this.push(chunk);
|
||||
callback();
|
||||
},
|
||||
});
|
||||
|
||||
const sink = new Writable({
|
||||
write(chunk, _encoding, callback) {
|
||||
for (const byte of chunk) {
|
||||
if (byte === 10) {
|
||||
linesProcessed++;
|
||||
}
|
||||
}
|
||||
|
||||
cbDebounced();
|
||||
callback();
|
||||
},
|
||||
});
|
||||
|
||||
return [source, sink];
|
||||
}
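A minimal sketch of how the source/sink pair is meant to be used, mirroring the `pipeline(sqlStream, progressSource, psql, progressSink)` calls above. Since the helper is file-private, this would live in the same module, and a `PassThrough` stands in for the real psql child process:

```ts
// Sketch only — placed in this module, since createSqlProgressStreams is not exported.
// PassThrough, Readable and pipeline are already imported at the top of this file.
async function demoRestoreProgress() {
  // Stand-in for the duplex stream returned by processRepository.spawnDuplexStream(bin, args, ...).
  const psql = new PassThrough();

  // Statements flowing in; restoreDatabaseBackup builds this from the sql() generator.
  const sqlStream = Readable.from(['SELECT 1;\n', 'SELECT 2;\n']);

  const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
    console.log(`restore ~ ${(progress * 100).toFixed(2)}%`);
  });

  // The source counts statement lines sent to psql (skipping COPY ... FROM stdin payloads);
  // the sink counts the lines psql echoes back (--echo-all). Their ratio is the reported progress.
  await pipeline(sqlStream, progressSource, psql, progressSink);
}
```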
|
||||
@@ -42,7 +42,7 @@ const cacheControlHeaders: Record<CacheControl, string | null> = {
|
||||
export const sendFile = async (
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
handler: () => Promise<ImmichFileResponse>,
|
||||
handler: () => Promise<ImmichFileResponse> | ImmichFileResponse,
|
||||
logger: LoggingRepository,
|
||||
): Promise<void> => {
|
||||
// promisified version of 'res.sendFile' for cleaner async handling
|
||||
|
||||
@@ -1,6 +1,59 @@
|
||||
import { createAdapter } from '@socket.io/redis-adapter';
|
||||
import Redis from 'ioredis';
|
||||
import { SignJWT } from 'jose';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
|
||||
import { join } from 'node:path';
|
||||
import { Server as SocketIO } from 'socket.io';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { MaintenanceAuthDto, MaintenanceDetectInstallResponseDto } from 'src/dtos/maintenance.dto';
|
||||
import { StorageFolder } from 'src/enum';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { AppRestartEvent } from 'src/repositories/event.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
|
||||
export function sendOneShotAppRestart(state: AppRestartEvent): void {
|
||||
const server = new SocketIO();
|
||||
const { redis } = new ConfigRepository().getEnv();
|
||||
const pubClient = new Redis(redis);
|
||||
const subClient = pubClient.duplicate();
|
||||
server.adapter(createAdapter(pubClient, subClient));
|
||||
|
||||
/**
|
||||
* Keep trying until we manage to stop Immich
|
||||
*
|
||||
* Sometimes there appear to be communication
|
||||
* issues with the other servers.
|
||||
*
|
||||
* This issue only occurs with this method.
|
||||
*/
|
||||
async function tryTerminate() {
|
||||
while (true) {
|
||||
try {
|
||||
const responses = await server.serverSideEmitWithAck('AppRestart', state);
|
||||
if (responses.length > 0) {
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
console.error('Encountered an error while telling Immich to stop.');
|
||||
}
|
||||
|
||||
console.info(
|
||||
"\nIt doesn't appear that Immich stopped, trying again in a moment.\nIf Immich is already not running, you can ignore this error.",
|
||||
);
|
||||
|
||||
await new Promise((r) => setTimeout(r, 1e3));
|
||||
}
|
||||
}
|
||||
|
||||
// => corresponds to notification.service.ts#onAppRestart
|
||||
server.emit('AppRestartV1', state, () => {
|
||||
void tryTerminate().finally(() => {
|
||||
pubClient.disconnect();
|
||||
subClient.disconnect();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function createMaintenanceLoginUrl(
|
||||
baseUrl: string,
|
||||
@@ -23,3 +76,37 @@ export async function signMaintenanceJwt(secret: string, data: MaintenanceAuthDt
|
||||
export function generateMaintenanceSecret(): string {
|
||||
return randomBytes(64).toString('hex');
|
||||
}
|
||||
|
||||
export async function detectPriorInstall(
|
||||
storageRepository: StorageRepository,
|
||||
): Promise<MaintenanceDetectInstallResponseDto> {
|
||||
return {
|
||||
storage: await Promise.all(
|
||||
Object.values(StorageFolder).map(async (folder) => {
|
||||
const path = StorageCore.getBaseFolder(folder);
|
||||
const files = await storageRepository.readdir(path);
|
||||
const filename = join(StorageCore.getBaseFolder(folder), '.immich');
|
||||
|
||||
let readable = false,
|
||||
writable = false;
|
||||
|
||||
try {
|
||||
await storageRepository.readFile(filename);
|
||||
readable = true;
|
||||
|
||||
await storageRepository.overwriteFile(filename, Buffer.from(`${Date.now()}`));
|
||||
writable = true;
|
||||
} catch {
|
||||
// no-op
|
||||
}
|
||||
|
||||
return {
|
||||
folder,
|
||||
readable,
|
||||
writable,
|
||||
files: files.filter((fn) => fn !== '.immich').length,
|
||||
};
|
||||
}),
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -139,6 +139,16 @@ export class UUIDAssetIDParamDto {
|
||||
assetId!: string;
|
||||
}
|
||||
|
||||
export class FilenameParamDto {
|
||||
@IsNotEmpty()
|
||||
@IsString()
|
||||
@ApiProperty({ format: 'string' })
|
||||
@Matches(/^[a-zA-Z0-9_\-.]+$/, {
|
||||
message: 'Filename contains invalid characters',
|
||||
})
|
||||
filename!: string;
|
||||
}
|
||||
|
||||
type PinCodeOptions = { optional?: boolean } & OptionalOptions;
|
||||
export const PinCode = (options?: PinCodeOptions & ApiPropertyOptions) => {
|
||||
const { optional, nullable, emptyToNull, ...apiPropertyOptions } = {
|
||||
|
||||
@@ -12,12 +12,11 @@ async function bootstrap() {
|
||||
|
||||
const app = await NestFactory.create<NestExpressApplication>(MaintenanceModule, { bufferLogs: true });
|
||||
app.get(AppRepository).setCloseFn(() => app.close());
|
||||
|
||||
void configureExpress(app, {
|
||||
permitSwaggerWrite: false,
|
||||
ssr: MaintenanceWorkerService,
|
||||
});
|
||||
|
||||
void app.get(MaintenanceWorkerService).logSecret();
|
||||
}
|
||||
|
||||
bootstrap().catch((error) => {
|
||||
|
||||
@@ -49,6 +49,9 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
|
||||
return {
|
||||
createZipStream: vitest.fn(),
|
||||
createReadStream: vitest.fn(),
|
||||
createPlainReadStream: vitest.fn(),
|
||||
createGzip: vitest.fn(),
|
||||
createGunzip: vitest.fn(),
|
||||
readFile: vitest.fn(),
|
||||
readTextFile: vitest.fn(),
|
||||
createFile: vitest.fn(),
|
||||
|
||||
@@ -7,7 +7,7 @@ import { NextFunction } from 'express';
|
||||
import { Kysely } from 'kysely';
|
||||
import multer from 'multer';
|
||||
import { ChildProcessWithoutNullStreams } from 'node:child_process';
|
||||
import { Readable, Writable } from 'node:stream';
|
||||
import { Duplex, Readable, Writable } from 'node:stream';
|
||||
import { PNG } from 'pngjs';
|
||||
import postgres from 'postgres';
|
||||
import { UploadFieldName } from 'src/dtos/asset-media.dto';
|
||||
@@ -496,6 +496,74 @@ export const mockSpawn = vitest.fn((exitCode: number, stdout: string, stderr: st
|
||||
} as unknown as ChildProcessWithoutNullStreams;
|
||||
});
|
||||
|
||||
export const mockDuplex = vitest.fn(
|
||||
(command: string, exitCode: number, stdout: string, stderr: string, error?: unknown) => {
|
||||
const duplex = new Duplex({
|
||||
write(_chunk, _encoding, callback) {
|
||||
callback();
|
||||
},
|
||||
|
||||
read() {},
|
||||
|
||||
final(callback) {
|
||||
callback();
|
||||
},
|
||||
});
|
||||
|
||||
setImmediate(() => {
|
||||
if (error) {
|
||||
duplex.destroy(error as Error);
|
||||
} else if (exitCode === 0) {
|
||||
/* eslint-disable unicorn/prefer-single-call */
|
||||
duplex.push(stdout);
|
||||
duplex.push(null);
|
||||
/* eslint-enable unicorn/prefer-single-call */
|
||||
} else {
|
||||
duplex.destroy(new Error(`${command} non-zero exit code (${exitCode})\n${stderr}`));
|
||||
}
|
||||
});
|
||||
|
||||
return duplex;
|
||||
},
|
||||
);
|
||||
|
||||
export const mockFork = vitest.fn((exitCode: number, stdout: string, stderr: string, error?: unknown) => {
|
||||
const stdoutStream = new Readable({
|
||||
read() {
|
||||
this.push(stdout); // write mock data to stdout
|
||||
this.push(null); // end stream
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
stdout: stdoutStream,
|
||||
stderr: new Readable({
|
||||
read() {
|
||||
this.push(stderr); // write mock data to stderr
|
||||
this.push(null); // end stream
|
||||
},
|
||||
}),
|
||||
stdin: new Writable({
|
||||
write(chunk, encoding, callback) {
|
||||
callback();
|
||||
},
|
||||
}),
|
||||
exitCode,
|
||||
on: vitest.fn((event, callback: any) => {
|
||||
if (event === 'close') {
|
||||
stdoutStream.once('end', () => callback(0));
|
||||
}
|
||||
if (event === 'error' && error) {
|
||||
stdoutStream.once('end', () => callback(error));
|
||||
}
|
||||
if (event === 'exit') {
|
||||
stdoutStream.once('end', () => callback(exitCode));
|
||||
}
|
||||
}),
|
||||
kill: vitest.fn(),
|
||||
} as unknown as ChildProcessWithoutNullStreams;
|
||||
});
|
||||
|
||||
export async function* makeStream<T>(items: T[] = []): AsyncIterableIterator<T> {
|
||||
for (const item of items) {
|
||||
await Promise.resolve();
|
||||
|
||||
@@ -1,48 +1,42 @@
|
||||
import { photoZoomState } from '$lib/stores/zoom-image.store';
|
||||
import { useZoomImageWheel } from '@zoom-image/svelte';
|
||||
import { createZoomImageWheel } from '@zoom-image/core';
|
||||
import { get } from 'svelte/store';
|
||||
|
||||
export const zoomImageAction = (node: HTMLElement, options?: { disabled?: boolean }) => {
|
||||
const { createZoomImage, zoomImageState, setZoomImageState } = useZoomImageWheel();
|
||||
|
||||
createZoomImage(node, {
|
||||
const state = get(photoZoomState);
|
||||
const zoomInstance = createZoomImageWheel(node, {
|
||||
maxZoom: 10,
|
||||
initialState: state,
|
||||
});
|
||||
|
||||
const state = get(photoZoomState);
|
||||
if (state) {
|
||||
setZoomImageState(state);
|
||||
}
|
||||
const unsubscribes = [
|
||||
photoZoomState.subscribe((state) => zoomInstance.setState(state)),
|
||||
zoomInstance.subscribe(({ state }) => {
|
||||
photoZoomState.set(state);
|
||||
}),
|
||||
];
|
||||
|
||||
// Store original event handlers so we can prevent them when disabled
|
||||
const wheelHandler = (event: WheelEvent) => {
|
||||
const stopIfDisabled = (event: Event) => {
|
||||
if (options?.disabled) {
|
||||
event.stopImmediatePropagation();
|
||||
}
|
||||
};
|
||||
|
||||
const pointerDownHandler = (event: PointerEvent) => {
|
||||
if (options?.disabled) {
|
||||
event.stopImmediatePropagation();
|
||||
}
|
||||
};
|
||||
|
||||
// Add handlers at capture phase with higher priority
|
||||
node.addEventListener('wheel', wheelHandler, { capture: true });
|
||||
node.addEventListener('pointerdown', pointerDownHandler, { capture: true });
|
||||
|
||||
const unsubscribes = [photoZoomState.subscribe(setZoomImageState), zoomImageState.subscribe(photoZoomState.set)];
|
||||
node.addEventListener('wheel', stopIfDisabled, { capture: true });
|
||||
node.addEventListener('pointerdown', stopIfDisabled, { capture: true });
|
||||
|
||||
node.style.overflow = 'visible';
|
||||
return {
|
||||
update(newOptions?: { disabled?: boolean }) {
|
||||
options = newOptions;
|
||||
},
|
||||
destroy() {
|
||||
node.removeEventListener('wheel', wheelHandler, { capture: true });
|
||||
node.removeEventListener('pointerdown', pointerDownHandler, { capture: true });
|
||||
for (const unsubscribe of unsubscribes) {
|
||||
unsubscribe();
|
||||
}
|
||||
node.removeEventListener('wheel', stopIfDisabled, { capture: true });
|
||||
node.removeEventListener('pointerdown', stopIfDisabled, { capture: true });
|
||||
zoomInstance.cleanup();
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
<script lang="ts">
|
||||
import { handleError } from '$lib/utils/handle-error';
|
||||
import { MaintenanceAction, setMaintenanceMode } from '@immich/sdk';
|
||||
import { Button } from '@immich/ui';
|
||||
import { t } from 'svelte-i18n';
|
||||
import { fade } from 'svelte/transition';
|
||||
|
||||
interface Props {
|
||||
disabled?: boolean;
|
||||
}
|
||||
|
||||
let { disabled = false }: Props = $props();
|
||||
|
||||
async function start() {
|
||||
try {
|
||||
await setMaintenanceMode({
|
||||
setMaintenanceModeDto: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
handleError(error, $t('admin.maintenance_start_error'));
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<div>
|
||||
<div in:fade={{ duration: 500 }}>
|
||||
<div class="ms-4 mt-4 flex items-end gap-4">
|
||||
<Button shape="round" type="submit" {disabled} size="small" onclick={start}
|
||||
>{$t('admin.maintenance_start')}</Button
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -6,7 +6,7 @@
|
||||
import { sidebarStore } from '$lib/stores/sidebar.svelte';
|
||||
import type { HeaderButtonActionItem } from '$lib/types';
|
||||
import { AppShell, AppShellHeader, AppShellSidebar, MenuItemType, NavbarItem, type BreadcrumbItem } from '@immich/ui';
|
||||
import { mdiAccountMultipleOutline, mdiBookshelf, mdiCog, mdiServer, mdiTrayFull } from '@mdi/js';
|
||||
import { mdiAccountMultipleOutline, mdiBookshelf, mdiCog, mdiServer, mdiTrayFull, mdiWrench } from '@mdi/js';
|
||||
import type { Snippet } from 'svelte';
|
||||
import { t } from 'svelte-i18n';
|
||||
|
||||
@@ -32,6 +32,7 @@
|
||||
<NavbarItem title={$t('external_libraries')} href={Route.libraries()} icon={mdiBookshelf} />
|
||||
<NavbarItem title={$t('admin.queues')} href={Route.queues()} icon={mdiTrayFull} />
|
||||
<NavbarItem title={$t('settings')} href={Route.systemSettings()} icon={mdiCog} />
|
||||
<NavbarItem title={$t('admin.maintenance_settings')} href={Route.systemMaintenance()} icon={mdiWrench} />
|
||||
<NavbarItem title={$t('server_stats')} href={Route.systemStatistics()} icon={mdiServer} />
|
||||
</div>
|
||||
|
||||
|
||||
@@ -5,24 +5,27 @@
|
||||
title?: string;
|
||||
children?: Snippet;
|
||||
withHeader?: boolean;
|
||||
withBackdrop?: boolean;
|
||||
}
|
||||
|
||||
let { title, children, withHeader = true }: Props = $props();
|
||||
let { title, children, withHeader = true, withBackdrop = true }: Props = $props();
|
||||
</script>
|
||||
|
||||
<section class="min-w-dvw flex min-h-dvh items-center justify-center relative">
|
||||
<div class="absolute -z-10 w-full h-full flex place-items-center place-content-center">
|
||||
<img
|
||||
src={immichLogo}
|
||||
class="max-w-(--breakpoint-md) mx-auto h-full mb-2 antialiased overflow-hidden"
|
||||
alt="Immich logo"
|
||||
/>
|
||||
<div
|
||||
class="w-full h-[99%] absolute start-0 top-0 backdrop-blur-[200px] bg-transparent dark:bg-immich-dark-bg/20"
|
||||
></div>
|
||||
</div>
|
||||
{#if withBackdrop}
|
||||
<div class="absolute -z-10 w-full h-full flex place-items-center place-content-center">
|
||||
<img
|
||||
src={immichLogo}
|
||||
class="max-w-(--breakpoint-md) mx-auto h-full mb-2 antialiased overflow-hidden"
|
||||
alt="Immich logo"
|
||||
/>
|
||||
<div
|
||||
class="w-full h-[99%] absolute start-0 top-0 backdrop-blur-[200px] bg-transparent dark:bg-immich-dark-bg/20"
|
||||
></div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<Card color="secondary" class="w-full max-w-lg border m-2">
|
||||
<Card color="secondary" class="w-full max-w-xl border m-2">
|
||||
{#if withHeader}
|
||||
<CardHeader class="mt-6">
|
||||
<VStack>
|
||||
|
||||
125
web/src/lib/components/maintenance/MaintenanceBackupEntry.svelte
Normal file
@@ -0,0 +1,125 @@
|
||||
<script lang="ts">
|
||||
import OnEvents from '$lib/components/OnEvents.svelte';
|
||||
import { BackupFileStatus } from '$lib/constants';
|
||||
import { getDatabaseBackupActions, handleRestoreDatabaseBackup } from '$lib/services/database-backups.service';
|
||||
import { locale } from '$lib/stores/preferences.store';
|
||||
import { getBytesWithUnit } from '$lib/utils/byte-units';
|
||||
import { Button, Card, CardBody, ContextMenuButton, HStack, Icon, Stack, Text } from '@immich/ui';
|
||||
import { mdiAlertCircle, mdiCheckCircle, mdiDatabaseRefreshOutline } from '@mdi/js';
|
||||
import { DateTime } from 'luxon';
|
||||
import { t } from 'svelte-i18n';
|
||||
|
||||
type Props = {
|
||||
filename: string;
|
||||
filesize: number;
|
||||
expectedVersion: string;
|
||||
};
|
||||
|
||||
const { filename, filesize, expectedVersion }: Props = $props();
|
||||
|
||||
const filesizeText = $derived(getBytesWithUnit(filesize, 1));
|
||||
|
||||
const backupDateTime = $derived.by(() => {
|
||||
const dateMatch = filename.match(/\d+T\d+/);
|
||||
if (dateMatch) {
|
||||
return DateTime.fromFormat(dateMatch[0], "yyyyMMdd'T'HHmmss", { zone: 'utc' }).toLocal();
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
const timeDisplay = $derived(backupDateTime?.toLocaleString(DateTime.TIME_SIMPLE));
|
||||
const relativeTime = $derived(backupDateTime?.toRelative({ locale: $locale }));
|
||||
|
||||
const version = $derived(filename.match(/-v(.*)-/)?.[1]);
|
||||
|
||||
const status = $derived.by(() => {
|
||||
if (!version) {
|
||||
return BackupFileStatus.UnknownVersion;
|
||||
}
|
||||
if (version !== expectedVersion) {
|
||||
return BackupFileStatus.DifferentVersion;
|
||||
}
|
||||
return BackupFileStatus.OK;
|
||||
});
|
||||
|
||||
const { Download, Delete } = $derived(getDatabaseBackupActions($t, filename));
|
||||
|
||||
let isDeleting = $state(false);
|
||||
|
||||
function onBackupDeleteStatus(event: { filename: string; isDeleting: boolean }) {
|
||||
if (event.filename === filename) {
|
||||
isDeleting = event.isDeleting;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<OnEvents {onBackupDeleteStatus} />
|
||||
|
||||
<Card class="dark:bg-dark-900">
|
||||
<CardBody class="pt-3 pb-4 px-6">
|
||||
<Stack gap={3} class="grow min-w-0">
|
||||
<div class="flex justify-between items-center gap-3">
|
||||
<HStack gap={2} class="min-w-0">
|
||||
{#if status === BackupFileStatus.OK}
|
||||
<Icon icon={mdiCheckCircle} size="18" class="text-success" />
|
||||
{:else if status === BackupFileStatus.DifferentVersion}
|
||||
<Icon icon={mdiAlertCircle} size="18" class="text-warning" />
|
||||
{:else}
|
||||
<Icon icon={mdiAlertCircle} size="18" class="text-danger" />
|
||||
{/if}
|
||||
|
||||
{#if timeDisplay}
|
||||
<Text class="font-medium" size="small">{timeDisplay}</Text>
|
||||
{:else}
|
||||
<Text class="font-medium" size="small">{$t('unknown_date')}</Text>
|
||||
{/if}
|
||||
{#if relativeTime}
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="w-1 h-1 bg-light-500"></div>
|
||||
<Text size="tiny" color="muted">{relativeTime}</Text>
|
||||
</div>
|
||||
{/if}
|
||||
</HStack>
|
||||
|
||||
<HStack gap={1}>
|
||||
<Button size="small" onclick={() => handleRestoreDatabaseBackup(filename)} disabled={isDeleting}
|
||||
>{$t('restore')}</Button
|
||||
>
|
||||
<ContextMenuButton
|
||||
disabled={isDeleting}
|
||||
position="top-right"
|
||||
aria-label={$t('open')}
|
||||
items={[Download, Delete]}
|
||||
/>
|
||||
</HStack>
|
||||
</div>
|
||||
|
||||
<HStack>
|
||||
<Icon icon={mdiDatabaseRefreshOutline} size="16" color="gray" />
|
||||
<Text size="small" class="break-all font-mono">{filename}</Text>
|
||||
</HStack>
|
||||
|
||||
{#if status === BackupFileStatus.UnknownVersion}
|
||||
<Text size="small" color="danger">
|
||||
{$t('admin.maintenance_restore_backup_unknown_version')}
|
||||
</Text>
|
||||
{:else if status === BackupFileStatus.DifferentVersion}
|
||||
<Text size="small" color="warning">
|
||||
{$t('admin.maintenance_restore_backup_different_version')}
|
||||
</Text>
|
||||
{/if}
|
||||
|
||||
<HStack gap={8}>
|
||||
<div class="flex gap-1">
|
||||
<Text size="tiny" color="muted">{$t('version')}:</Text>
|
||||
<Text size="tiny" fontWeight="medium">{version ? `v${version}` : $t('unknown')}</Text>
|
||||
</div>
|
||||
|
||||
<div class="flex gap-1">
|
||||
<Text size="tiny" color="muted">{$t('size')}:</Text>
|
||||
<Text size="tiny" fontWeight="medium">{filesizeText[0]} {filesizeText[1]}</Text>
|
||||
</div>
|
||||
</HStack>
|
||||
</Stack>
|
||||
</CardBody>
|
||||
</Card>
|
||||
135
web/src/lib/components/maintenance/MaintenanceBackupsList.svelte
Normal file
@@ -0,0 +1,135 @@
|
||||
<script lang="ts">
|
||||
import HeaderActionButton from '$lib/components/HeaderActionButton.svelte';
|
||||
import MaintenanceBackupEntry from '$lib/components/maintenance/MaintenanceBackupEntry.svelte';
|
||||
import OnEvents from '$lib/components/OnEvents.svelte';
|
||||
import { handleUploadDatabaseBackup } from '$lib/services/database-backups.service';
|
||||
import type { DatabaseBackupDto } from '@immich/sdk';
|
||||
import { listDatabaseBackups } from '@immich/sdk';
|
||||
import { Card, CardBody, HStack, Icon, ProgressBar, Stack, Text } from '@immich/ui';
|
||||
import { mdiCalendar, mdiTrayArrowUp } from '@mdi/js';
|
||||
import { DateTime } from 'luxon';
|
||||
import { onMount } from 'svelte';
|
||||
import { t } from 'svelte-i18n';
|
||||
|
||||
type Props = {
|
||||
backups?: DatabaseBackupDto[];
|
||||
expectedVersion: string;
|
||||
};
|
||||
|
||||
let props: Props = $props();
|
||||
|
||||
let backups = $state(props.backups ?? []);
|
||||
|
||||
async function reloadBackups() {
|
||||
const result = await listDatabaseBackups();
|
||||
backups = result.backups;
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
if (!props.backups) {
|
||||
void reloadBackups();
|
||||
}
|
||||
});
|
||||
|
||||
let uploadProgress = $state(-1);
|
||||
|
||||
function onBackupDeleted(event: { filename: string }) {
|
||||
backups = backups.filter((backup) => backup.filename !== event.filename);
|
||||
}
|
||||
|
||||
function onBackupUpload(event: { progress: number; isComplete: boolean }) {
|
||||
uploadProgress = event.progress;
|
||||
|
||||
if (event.isComplete) {
|
||||
void reloadBackups();
|
||||
}
|
||||
}
|
||||
|
||||
const groupedBackups = $derived.by(() => {
|
||||
// eslint-disable-next-line svelte/prefer-svelte-reactivity
|
||||
const groups = new Map<string, { date: DateTime; backups: DatabaseBackupDto[] }>();
|
||||
const unknownDateKey = $t('unknown_date');
|
||||
|
||||
for (const backup of backups) {
|
||||
const dateMatch = backup.filename.match(/\d+T\d+/);
|
||||
let dateKey: string;
|
||||
let dt: DateTime;
|
||||
|
||||
if (dateMatch) {
|
||||
dt = DateTime.fromFormat(dateMatch[0], "yyyyMMdd'T'HHmmss", { zone: 'utc' });
|
||||
dateKey = dt.toFormat('LLLL d, yyyy');
|
||||
} else {
|
||||
dt = DateTime.fromMillis(0);
|
||||
dateKey = unknownDateKey;
|
||||
}
|
||||
|
||||
if (!groups.has(dateKey)) {
|
||||
groups.set(dateKey, { date: dt.startOf('day'), backups: [] });
|
||||
}
|
||||
groups.get(dateKey)!.backups.push(backup);
|
||||
}
|
||||
|
||||
// Sort by date descending (newest first), but put unknown date at the top
|
||||
const sortedEntries = [...groups.entries()].sort((a, b) => {
|
||||
if (a[0] === unknownDateKey) {
|
||||
return -1;
|
||||
}
|
||||
if (b[0] === unknownDateKey) {
|
||||
return 1;
|
||||
}
|
||||
return b[1].date.toMillis() - a[1].date.toMillis();
|
||||
});
|
||||
|
||||
return new Map(sortedEntries.map(([key, value]) => [key, value.backups]));
|
||||
});
|
||||
</script>
|
||||
|
||||
<OnEvents {onBackupDeleted} {onBackupUpload} />
|
||||
|
||||
<Stack gap={4} class="mt-4 text-left">
|
||||
<Card color="info">
|
||||
<CardBody>
|
||||
{#if uploadProgress === -1}
|
||||
<div class="flex justify-between items-center">
|
||||
<div class="flex gap-2 items-end w-max">
|
||||
<Icon icon={mdiTrayArrowUp} size="20" class="text-muted"></Icon>
|
||||
<Text class="grow">{$t('admin.maintenance_upload_backup')}</Text>
|
||||
</div>
|
||||
<HeaderActionButton
|
||||
action={{
|
||||
color: 'primary',
|
||||
title: $t('select_from_computer'),
|
||||
onAction: handleUploadDatabaseBackup,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
{:else}
|
||||
<HStack gap={8}>
|
||||
<Text class="grow">{$t('asset_uploading')}</Text>
|
||||
<ProgressBar progress={uploadProgress} size="tiny" />
|
||||
</HStack>
|
||||
{/if}
|
||||
</CardBody>
|
||||
</Card>
|
||||
|
||||
<hr />
|
||||
|
||||
{#each [...groupedBackups.entries()] as [dateGroup, groupBackups] (dateGroup)}
|
||||
<Stack gap={2}>
|
||||
<div class="mt-5 mb-1">
|
||||
<div class="bg-primary-50 flex gap-2 px-4 py-2 rounded-xl w-max place-items-center">
|
||||
<Icon icon={mdiCalendar} size="18" />
|
||||
<Text size="small" fontWeight="medium" color="muted">{dateGroup}</Text>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{#each groupBackups as backup (backup.filename)}
|
||||
<MaintenanceBackupEntry
|
||||
filename={backup.filename}
|
||||
filesize={backup.filesize}
|
||||
expectedVersion={props.expectedVersion}
|
||||
/>
|
||||
{/each}
|
||||
</Stack>
|
||||
{/each}
|
||||
</Stack>
|
||||
@@ -0,0 +1,19 @@
|
||||
<script lang="ts">
|
||||
import RestoreFlowDetectInstall from '$lib/components/maintenance/restore-flow/RestoreFlowDetectInstall.svelte';
|
||||
import RestoreFlowSelectBackup from '$lib/components/maintenance/restore-flow/RestoreFlowSelectBackup.svelte';
|
||||
|
||||
type Props = {
|
||||
end: () => void;
|
||||
expectedVersion: string;
|
||||
};
|
||||
|
||||
const { end, expectedVersion }: Props = $props();
|
||||
|
||||
let stage = $state(0);
|
||||
</script>
|
||||
|
||||
{#if stage === 0}
|
||||
<RestoreFlowDetectInstall next={() => stage++} {end} />
|
||||
{:else}
|
||||
<RestoreFlowSelectBackup previous={() => stage--} {end} {expectedVersion} />
|
||||
{/if}
|
||||
@@ -0,0 +1,98 @@
<script lang="ts">
  import { detectPriorInstall, type MaintenanceDetectInstallResponseDto } from '@immich/sdk';
  import { Button, Heading, HStack, Icon, Stack, Text } from '@immich/ui';
  import { mdiAlert, mdiArrowRight, mdiCheck, mdiClose, mdiRefresh } from '@mdi/js';
  import { onMount } from 'svelte';
  import { t } from 'svelte-i18n';

  type Props = {
    next: () => void;
    end: () => void;
  };

  const { next, end }: Props = $props();

  let detectedInstall: MaintenanceDetectInstallResponseDto | undefined = $state();

  const reload = async () => {
    detectedInstall = await detectPriorInstall();
  };

  const getLibraryFolderCheckStatus = (writable: boolean, readable: boolean) => {
    if (writable) {
      return $t('maintenance_restore_library_folder_pass');
    } else if (readable) {
      return $t('maintenance_restore_library_folder_write_fail');
    } else {
      return $t('maintenance_restore_library_folder_read_fail');
    }
  };

  onMount(() => reload());
</script>

<Heading size="large" color="primary" tag="h1">{$t('maintenance_restore_library')}</Heading>
<Text>{$t('maintenance_restore_library_description')}</Text>
<div class="bg-white dark:bg-black w-full m-4 p-6 rounded-xl border border-light-300">
  <Stack>
    {#if detectedInstall}
      {#each detectedInstall.storage as { folder, readable, writable } (folder)}
        <HStack>
          <Icon icon={writable ? mdiCheck : mdiClose} class={writable ? 'text-success' : 'text-danger'} />
          <Text>{folder} ({getLibraryFolderCheckStatus(writable, readable)})</Text>
        </HStack>
      {/each}
      {#each detectedInstall.storage as { folder, files } (folder)}
        {#if folder !== 'backups'}
          <HStack class="items-start">
            <Icon
              class={`mt-1 ${files ? 'text-success' : folder === 'profile' || folder === 'upload' ? 'text-danger' : 'text-warning'}`}
              icon={files ? mdiCheck : folder === 'profile' || folder === 'upload' ? mdiClose : mdiAlert}
            />
            <Stack gap={0} class="items-start">
              <Text>
                {#if files}
                  {$t('maintenance_restore_library_folder_has_files', {
                    values: {
                      folder,
                      count: files,
                    },
                  })}
                {:else}
                  {$t('maintenance_restore_library_folder_no_files', {
                    values: {
                      folder,
                    },
                  })}
                {/if}
              </Text>
              {#if !files}
                {#if folder === 'profile' || folder === 'upload'}
                  <Text variant="italic">{$t('maintenance_restore_library_hint_missing_files')}</Text>
                {/if}
                {#if folder === 'encoded-video' || folder === 'thumbs'}
                  <Text variant="italic">{$t('maintenance_restore_library_hint_regenerate_later')}</Text>
                {/if}
                {#if folder === 'library'}
                  <Text variant="italic">{$t('maintenance_restore_library_hint_storage_template_missing_files')}</Text>
                {/if}
              {/if}
            </Stack>
          </HStack>
        {/if}
      {/each}

      <Button leadingIcon={mdiRefresh} variant="ghost" onclick={reload}>{$t('refresh')}</Button>
    {:else}
      <HStack>
        <Icon icon={mdiRefresh} color="rgb(var(--immich-ui-primary))" />
        <Text>{$t('maintenance_restore_library_loading')}</Text>
      </HStack>
    {/if}
  </Stack>
</div>
<Text>{$t('maintenance_restore_library_confirm')}</Text>
<HStack>
  <Button onclick={end} variant="ghost">{$t('cancel')}</Button>
  <Button onclick={next} trailingIcon={mdiArrowRight}>{$t('next')}</Button>
</HStack>
@@ -0,0 +1,23 @@
<script lang="ts">
  import { Button, Heading, HStack, Scrollable } from '@immich/ui';
  import { mdiArrowLeft } from '@mdi/js';
  import { t } from 'svelte-i18n';
  import MaintenanceBackupsList from '../MaintenanceBackupsList.svelte';

  type Props = {
    previous: () => void;
    end: () => void;
    expectedVersion: string;
  };

  const { previous, end, expectedVersion }: Props = $props();
</script>

<Heading size="large" color="primary" tag="h1">{$t('maintenance_restore_from_backup')}</Heading>
<Scrollable class="max-h-120 bg-white dark:bg-black p-4 rounded-2xl border border-light-300 w-full">
  <MaintenanceBackupsList {expectedVersion} />
</Scrollable>
<HStack>
  <Button onclick={end} variant="ghost">{$t('cancel')}</Button>
  <Button onclick={previous} variant="ghost" leadingIcon={mdiArrowLeft}>{$t('back')}</Button>
</HStack>
@@ -396,4 +396,10 @@ export enum ToggleVisibility {
  SHOW_ALL = 'show-all',
}

export enum BackupFileStatus {
  OK,
  DifferentVersion,
  UnknownVersion,
}

export const assetViewerFadeDuration: number = 150;
@@ -45,6 +45,10 @@ export type Events = {

  PersonUpdate: [PersonResponseDto];

  BackupDeleteStatus: [{ filename: string; isDeleting: boolean }];
  BackupDeleted: [{ filename: string }];
  BackupUpload: [{ progress: number; isComplete: boolean }];

  QueueUpdate: [QueueResponseDto];

  SharedLinkCreate: [SharedLinkResponseDto];
121 web/src/lib/services/database-backups.service.ts Normal file
@@ -0,0 +1,121 @@
import { eventManager } from '$lib/managers/event-manager.svelte';
import { uploadRequest } from '$lib/utils';
import { openFilePicker } from '$lib/utils/file-uploader';
import { handleError } from '$lib/utils/handle-error';
import { getFormatter } from '$lib/utils/i18n';
import {
  deleteDatabaseBackup,
  getBaseUrl,
  MaintenanceAction,
  setMaintenanceMode,
  type DatabaseBackupUploadDto,
} from '@immich/sdk';
import { modalManager, type ActionItem } from '@immich/ui';
import { mdiDownload, mdiTrashCanOutline } from '@mdi/js';
import type { MessageFormatter } from 'svelte-i18n';

export const getDatabaseBackupActions = ($t: MessageFormatter, filename: string) => {
  const Download: ActionItem = {
    type: $t('command'),
    title: $t('download'),
    icon: mdiDownload,
    onAction: () => handleDownloadDatabaseBackup(filename),
  };

  const Delete: ActionItem = {
    type: $t('command'),
    title: $t('delete'),
    icon: mdiTrashCanOutline,
    color: 'danger',
    onAction: () => handleDeleteDatabaseBackup(filename),
  };

  return { Download, Delete };
};

export const handleRestoreDatabaseBackup = async (filename: string) => {
  const $t = await getFormatter();
  const confirm = await modalManager.showDialog({
    confirmText: $t('restore'),
    title: $t('admin.maintenance_restore_backup'),
    prompt: $t('admin.maintenance_restore_backup_description'),
  });

  if (!confirm) {
    return;
  }

  try {
    await setMaintenanceMode({
      setMaintenanceModeDto: {
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: filename,
      },
    });
  } catch (error) {
    handleError(error, $t('admin.maintenance_start_error'));
  }
};

export const handleDeleteDatabaseBackup = async (...filenames: string[]) => {
  const $t = await getFormatter();
  const confirm = await modalManager.showDialog({
    confirmText: $t('delete'),
    title: $t('admin.maintenance_delete_backup'),
    prompt: $t('admin.maintenance_delete_backup_description'),
  });

  if (!confirm) {
    return;
  }

  try {
    for (const filename of filenames) {
      eventManager.emit('BackupDeleteStatus', { filename, isDeleting: true });
    }

    await deleteDatabaseBackup({
      databaseBackupDeleteDto: {
        backups: filenames,
      },
    });

    for (const filename of filenames) {
      eventManager.emit('BackupDeleted', { filename });
    }
  } catch (error) {
    handleError(error, $t('admin.maintenance_delete_error'));

    for (const filename of filenames) {
      eventManager.emit('BackupDeleteStatus', { filename, isDeleting: false });
    }
  }
};

export const handleDownloadDatabaseBackup = (filename: string) => {
  location.href = getBaseUrl() + '/admin/database-backups/' + filename;
};

export const handleUploadDatabaseBackup = async () => {
  const $t = await getFormatter();

  try {
    const [file] = await openFilePicker({ multiple: false });
    const formData = new FormData();
    formData.append('file', file);

    await uploadRequest<DatabaseBackupUploadDto>({
      url: getBaseUrl() + '/admin/database-backups/upload',
      data: formData,
      onUploadProgress(event) {
        eventManager.emit('BackupUpload', { progress: event.loaded / event.total, isComplete: false });
      },
    });

    eventManager.emit('BackupUpload', { progress: 1, isComplete: true });
  } catch (error) {
    handleError(error, $t('admin.maintenance_upload_backup_error'));
  } finally {
    eventManager.emit('BackupUpload', { progress: -1, isComplete: false });
  }
};
31 web/src/lib/services/maintenance.service.ts Normal file
@@ -0,0 +1,31 @@
import { handleError } from '$lib/utils/handle-error';
import { getFormatter } from '$lib/utils/i18n';
import { MaintenanceAction, setMaintenanceMode, type SetMaintenanceModeDto } from '@immich/sdk';
import type { ActionItem } from '@immich/ui';
import { mdiProgressWrench } from '@mdi/js';
import type { MessageFormatter } from 'svelte-i18n';

export const getMaintenanceAdminActions = ($t: MessageFormatter) => {
  const StartMaintenance: ActionItem = {
    title: $t('admin.maintenance_start'),
    onAction: () =>
      handleSetMaintenanceMode({
        action: MaintenanceAction.Start,
      }),
    icon: mdiProgressWrench,
  };

  return { StartMaintenance };
};

export const handleSetMaintenanceMode = async (dto: SetMaintenanceModeDto) => {
  const $t = await getFormatter();

  try {
    await setMaintenanceMode({
      setMaintenanceModeDto: dto,
    });
  } catch (error) {
    handleError(error, $t('admin.maintenance_start_error'));
  }
};
@@ -1,4 +1,7 @@
import { type MaintenanceAuthDto } from '@immich/sdk';
import { type MaintenanceAuthDto, type MaintenanceStatusResponseDto } from '@immich/sdk';
import { writable } from 'svelte/store';

export const maintenanceAuth = writable<MaintenanceAuthDto>();
export const maintenanceStore = {
  auth: writable<MaintenanceAuthDto>(),
  status: writable<MaintenanceStatusResponseDto | undefined>(),
};
@@ -2,10 +2,17 @@ import { page } from '$app/state';
import { authManager } from '$lib/managers/auth-manager.svelte';
import { eventManager } from '$lib/managers/event-manager.svelte';
import { Route } from '$lib/route';
import { maintenanceStore } from '$lib/stores/maintenance.store';
import { notificationManager } from '$lib/stores/notification-manager.svelte';
import type { ReleaseEvent } from '$lib/types';
import { createEventEmitter } from '$lib/utils/eventemitter';
import { type AssetResponseDto, type NotificationDto, type ServerVersionResponseDto } from '@immich/sdk';
import {
  MaintenanceAction,
  type AssetResponseDto,
  type MaintenanceStatusResponseDto,
  type NotificationDto,
  type ServerVersionResponseDto,
} from '@immich/sdk';
import { io, type Socket } from 'socket.io-client';
import { get, writable } from 'svelte/store';
import { user } from './user.store';
@@ -31,6 +38,8 @@ export interface Events {
  on_notification: (notification: NotificationDto) => void;

  AppRestartV1: (event: AppRestartEvent) => void;

  MaintenanceStatusV1: (event: MaintenanceStatusResponseDto) => void;
  AssetEditReadyV1: (data: { asset: { id: string } }) => void;
}

@@ -55,6 +64,15 @@ websocket
  .on('disconnect', () => websocketStore.connected.set(false))
  .on('on_server_version', (serverVersion) => websocketStore.serverVersion.set(serverVersion))
  .on('AppRestartV1', (mode) => websocketStore.serverRestarting.set(mode))
  .on('MaintenanceStatusV1', (status) => {
    maintenanceStore.status.set(status);

    if (status.action === MaintenanceAction.End) {
      websocketStore.serverRestarting.set({
        isMaintenanceMode: false,
      });
    }
  })
  .on('on_new_release', (event) => eventManager.emit('ReleaseEvent', event))
  .on('on_session_delete', () => authManager.logout())
  .on('on_user_delete', (id) => eventManager.emit('UserAdminDeleted', { id }))
@@ -63,7 +81,7 @@ websocket

export const openWebsocketConnection = () => {
  try {
    if (get(user) || page.url.pathname.startsWith(Route.maintenanceMode())) {
    if (get(user) || get(websocketStore.serverRestarting) || page.url.pathname.startsWith(Route.maintenanceMode())) {
      websocket.connect();
    }
  } catch (error) {
@@ -1,4 +1,25 @@
import type { ZoomImageWheelState } from '@zoom-image/core';
import { writable } from 'svelte/store';
import { derived, writable } from 'svelte/store';

export const photoZoomState = writable<ZoomImageWheelState>();
export const photoZoomState = writable<ZoomImageWheelState>({
  currentRotation: 0,
  currentZoom: 1,
  enable: true,
  currentPositionX: 0,
  currentPositionY: 0,
});

export const photoZoomTransform = derived(
  photoZoomState,
  ($state) => `translate(${$state.currentPositionX}px,${$state.currentPositionY}px) scale(${$state.currentZoom})`,
);

export const resetZoomState = () => {
  photoZoomState.set({
    currentRotation: 0,
    currentZoom: 1,
    enable: true,
    currentPositionX: 0,
    currentPositionY: 0,
  });
};
@@ -43,19 +43,23 @@ export const addDummyItems = () => {

export const uploadExecutionQueue = new ExecutorQueue({ concurrency: 2 });

type FilePickerParam = { multiple?: boolean; extensions?: string[] };
type FileUploadParam = { multiple?: boolean; albumId?: string };

export const openFileUploadDialog = async (options: FileUploadParam = {}) => {
  const { albumId, multiple = true } = options;
  const extensions = uploadManager.getExtensions();
export const openFilePicker = async (options: FilePickerParam = {}) => {
  const { multiple = true, extensions } = options;

  return new Promise<string[]>((resolve, reject) => {
  return new Promise<File[]>((resolve, reject) => {
    try {
      const fileSelector = document.createElement('input');

      fileSelector.type = 'file';
      fileSelector.multiple = multiple;
      fileSelector.accept = extensions.join(',');

      if (extensions) {
        fileSelector.accept = extensions.join(',');
      }

      fileSelector.addEventListener(
        'change',
        (e: Event) => {
@@ -63,9 +67,9 @@ export const openFileUploadDialog = async (options: FileUploadParam = {}) => {
          if (!target.files) {
            return;
          }
          const files = Array.from(target.files);

          resolve(fileUploadHandler({ files, albumId }));
          const files = Array.from(target.files);
          resolve(files);
        },
        { passive: true },
      );
@@ -78,6 +82,17 @@ export const openFileUploadDialog = async (options: FileUploadParam = {}) => {
  });
};

export const openFileUploadDialog = async (options: FileUploadParam = {}) => {
  const { albumId, multiple = true } = options;
  const extensions = uploadManager.getExtensions();
  const files = await openFilePicker({
    multiple,
    extensions,
  });

  return fileUploadHandler({ files, albumId });
};

type FileUploadHandlerParams = Omit<FileUploaderParams, 'deviceAssetId' | 'assetFile'> & {
  files: File[];
};
@@ -1,6 +1,7 @@
import { Route } from '$lib/route';
import { maintenanceAuth as maintenanceAuth$ } from '$lib/stores/maintenance.store';
import { maintenanceLogin } from '@immich/sdk';
import { maintenanceStore } from '$lib/stores/maintenance.store';
import { websocketStore } from '$lib/stores/websocket';
import { getMaintenanceStatus, MaintenanceAction, maintenanceLogin } from '@immich/sdk';

export function maintenanceCreateUrl(url: URL) {
  return new URL(Route.maintenanceMode({ continue: url.pathname + url.search }), url.origin).href;
@@ -24,8 +25,33 @@ export const loadMaintenanceAuth = async () => {
      },
    });

    maintenanceAuth$.set(auth);
    maintenanceStore.auth.set(auth);
  } catch {
    // silently fail
  }
};

export const loadMaintenanceStatus = async () => {
  while (true) {
    try {
      const status = await getMaintenanceStatus();
      maintenanceStore.status.set(status);

      if (status.action === MaintenanceAction.End) {
        websocketStore.serverRestarting.set({
          isMaintenanceMode: false,
        });
      }

      break;
    } catch (error) {
      const status = (error as { status: number })?.status;
      if (status && status >= 500 && status < 600) {
        await new Promise((resolve) => setTimeout(resolve, 1000));
        continue;
      }

      throw error;
    }
  }
};
@@ -87,16 +87,16 @@
    showNavigationLoadingBar = false;
  });

  const { serverRestarting } = websocketStore;

  $effect.pre(() => {
    if ($user || page.url.pathname.startsWith(Route.maintenanceMode())) {
    if ($user || $serverRestarting || page.url.pathname.startsWith(Route.maintenanceMode())) {
      openWebsocketConnection();
    } else {
      closeWebsocketConnection();
    }
  });

  const { serverRestarting } = websocketStore;

  const onReleaseEvent = async (release: ReleaseEvent) => {
    if (!release.isAvailable || !$user.isAdmin) {
      return;
@@ -1,17 +1,35 @@
<script lang="ts">
  import AuthPageLayout from '$lib/components/layouts/AuthPageLayout.svelte';
  import { Route } from '$lib/route';
  import { Button, Heading } from '@immich/ui';
  import { websocketStore } from '$lib/stores/websocket';
  import { handleError } from '$lib/utils/handle-error';
  import { startDatabaseRestoreFlow } from '@immich/sdk';
  import { Button, Heading, Stack } from '@immich/ui';
  import { t } from 'svelte-i18n';

  async function switchToMaintenance() {
    try {
      websocketStore.serverRestarting.set({
        isMaintenanceMode: true,
      });

      await startDatabaseRestoreFlow();
    } catch (error) {
      handleError(error, $t('admin.maintenance_start_error'));
    }
  }
</script>

<AuthPageLayout>
  <div class="flex flex-col place-items-center text-center gap-12">
    <Heading size="large" color="primary" tag="h1">{$t('welcome_to_immich')}</Heading>
    <div>
      <Button href={Route.register()} size="medium" shape="round">
    <Stack>
      <Button href={Route.register()} size="large" shape="round">
        <span class="px-2 font-semibold">{$t('getting_started')}</span>
      </Button>
    </div>
      <Button size="small" shape="round" variant="ghost" onclick={switchToMaintenance}>
        <span class="px-2 font-semibold">{$t('maintenance_restore_from_backup')}</span>
      </Button>
    </Stack>
  </div>
</AuthPageLayout>
35 web/src/routes/admin/maintenance/+page.svelte Normal file
@@ -0,0 +1,35 @@
<script lang="ts">
  import AdminPageLayout from '$lib/components/layouts/AdminPageLayout.svelte';
  import MaintenanceBackupsList from '$lib/components/maintenance/MaintenanceBackupsList.svelte';
  import SettingAccordionState from '$lib/components/shared-components/settings/setting-accordion-state.svelte';
  import SettingAccordion from '$lib/components/shared-components/settings/setting-accordion.svelte';
  import { QueryParameter } from '$lib/constants';
  import { getMaintenanceAdminActions } from '$lib/services/maintenance.service';
  import { mdiRefresh } from '@mdi/js';
  import { t } from 'svelte-i18n';
  import type { PageData } from './$types';

  type Props = {
    data: PageData;
  };

  const { data }: Props = $props();
  const { StartMaintenance } = $derived(getMaintenanceAdminActions($t));
</script>

<AdminPageLayout breadcrumbs={[{ title: data.meta.title }]} actions={[StartMaintenance]}>
  <section id="setting-content" class="flex place-content-center sm:mx-4">
    <section class="w-full pb-28 sm:w-5/6 md:w-212.5">
      <SettingAccordionState queryParam={QueryParameter.IS_OPEN}>
        <SettingAccordion
          title={$t('admin.maintenance_restore_database_backup')}
          subtitle={$t('admin.maintenance_restore_database_backup_description')}
          icon={mdiRefresh}
          key="backups"
        >
          <MaintenanceBackupsList backups={data.backups} expectedVersion={data.expectedVersion} />
        </SettingAccordion>
      </SettingAccordionState>
    </section>
  </section>
</AdminPageLayout>
Some files were not shown because too many files have changed in this diff.