Mirror of https://github.com/immich-app/immich.git, synced 2025-12-08 05:41:04 -08:00

Compare commits: popup-menu...feat/datab (124 commits)
| SHA1 |
|---|
| e958516318 |
| 0d05c0d4ae |
| 4e2187acf9 |
| adc2d5d1e5 |
| 6b9cc855a5 |
| 02265ba224 |
| cf3686a509 |
| 3019091733 |
| 4296211c61 |
| 207a8bc55a |
| a63b418507 |
| fe8eb85e37 |
| 4659ceb425 |
| 17dfcedad6 |
| 20d1e610ce |
| 305bf60f97 |
| f9d2a9707d |
| ef944c29d3 |
| 274775d876 |
| 0945e18564 |
| e0428b565a |
| 4f93eda8d8 |
| f5df5fa98d |
| f07d1441ea |
| 1bcf28c062 |
| 9b955508e9 |
| 62628dfcfa |
| a79b4bdc47 |
| 94af1bba4d |
| b11aecd184 |
| b5ff460a55 |
| 8b1ba11e0b |
| a7fd19db52 |
| db7169ea01 |
| cede65f2dd |
| e355dccc48 |
| 8dd865d054 |
| e3f350ea60 |
| 6ec10a5f15 |
| 9cb968116b |
| 52edcdee60 |
| 96426fec7e |
| 47f5232a5f |
| a091ca76e7 |
| c8fea45731 |
| 390f0b2817 |
| 1cdffeb3be |
| 87f34ba505 |
| ca116caafb |
| 86b7b1c44d |
| 95d9bcb3f1 |
| 0f145a5b52 |
| 481ec02edb |
| 9f5f90b2ff |
| 1ad2282166 |
| b99d92961c |
| 45b5752cbf |
| 220d63e035 |
| 3be039b953 |
| e2ca0c6f67 |
| f84bdc14d5 |
| fd6f043aa4 |
| 5a6083f53c |
| a61f9d7a26 |
| 3863ff73ef |
| 534a9f50b6 |
| b46d6cda65 |
| 86d8e1a092 |
| 0940c313ac |
| f6316ca0c8 |
| 539167eb88 |
| 5bca8808a1 |
| e93652a4a5 |
| ac9a587063 |
| f7b59f50ed |
| 53ef26a5e4 |
| 6cefb9ca95 |
| fdacf0ec57 |
| cbf3a2c3cb |
| d2a4dd67d8 |
| 874782edf0 |
| a7245627fc |
| 174670a1b7 |
| a3c6d71a58 |
| 19ba23056c |
| 3d2d7fa64c |
| fccb31d1d8 |
| 8405a9bf0c |
| 3933b23e2c |
| 824f6e5b05 |
| 270d7e3cdc |
| 8463968712 |
| 5be08274ff |
| 161918e9ca |
| d6e3d26cfc |
| d5351de26f |
| ed4a850a01 |
| 9d4ad11cff |
| b887d4f557 |
| 2e15012257 |
| 56a4159295 |
| f69c49a60f |
| f778a4260b |
| 31f4665d35 |
| 53a74a7279 |
| dd1cf12aaa |
| 31410c3c20 |
| 26587dd690 |
| 442fe6e3d0 |
| af741a4761 |
| 7c2e8b1d62 |
| 56c93a71c0 |
| c090a1a9d9 |
| d040de2d52 |
| 73ae766d9f |
| edc1333db1 |
| b01b63b25a |
| 7e7d6af66b |
| 0ae03f68cf |
| 0419539c08 |
| f67153e44b |
| cc7895244d |
| a6fb942fca |
| 9cea3d7b2f |
.github/workflows/cli.yml (vendored, 2 changes)

```diff
@@ -105,7 +105,7 @@ jobs:
       - name: Generate docker image tags
         id: metadata
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
         with:
           flavor: |
             latest=false
```
.github/workflows/close-duplicates.yml (vendored, 2 changes)

```diff
@@ -35,7 +35,7 @@ jobs:
     needs: [get_body, should_run]
     if: ${{ needs.should_run.outputs.should_run == 'true' }}
     container:
-      image: ghcr.io/immich-app/mdq:main@sha256:73a05fc805dfd3bd29bebc08442aedfec5c419c5ad3421ec73edc5647233891a
+      image: ghcr.io/immich-app/mdq:main@sha256:237cdae7783609c96f18037a513d38088713cf4a2e493a3aa136d0c45490749a
     outputs:
       checked: ${{ steps.get_checkbox.outputs.checked }}
     steps:
```
.github/workflows/codeql-analysis.yml (vendored, 6 changes)

```diff
@@ -57,7 +57,7 @@ jobs:

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
@@ -70,7 +70,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/autobuild@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5

       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -83,6 +83,6 @@ jobs:
       #   ./location_of_script_within_repo/buildscript.sh

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
         with:
           category: '/language:${{matrix.language}}'
```
.github/workflows/test.yml (vendored, 2 changes)

```diff
@@ -572,7 +572,7 @@ jobs:
           token: ${{ steps.token.outputs.token }}
       - name: Install uv
         uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
       # TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
       # with:
       #   python-version: 3.11
```
.gitignore (vendored, 2 changes)

```diff
@@ -7,7 +7,7 @@
 .idea

 docker/upload
-docker/library
+docker/library*
 uploads
 coverage
```
e2e/src/api/specs/database-backups.e2e-spec.ts (new file, 267 lines)

```ts
import { LoginResponseDto, ManualJobName } from '@immich/sdk';
import { errorDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it } from 'vitest';

describe('/admin/database-backups', () => {
  let cookie: string | undefined;
  let admin: LoginResponseDto;

  beforeAll(async () => {
    await utils.resetDatabase();
    admin = await utils.adminSetup();
    await utils.resetBackups(admin.accessToken);
  });

  describe('GET /', async () => {
    it('should succeed and be empty', async () => {
      const { status, body } = await request(app)
        .get('/admin/database-backups')
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual({
        backups: [],
      });
    });

    it('should contain a created backup', async () => {
      await utils.createJob(admin.accessToken, {
        name: ManualJobName.BackupDatabase,
      });

      await utils.waitForQueueFinish(admin.accessToken, 'backupDatabase');

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app)
              .get('/admin/database-backups')
              .set('Authorization', `Bearer ${admin.accessToken}`);

            expect(status).toBe(200);
            return body;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toEqual(
          expect.objectContaining({
            backups: [expect.stringMatching(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/)],
          }),
        );
    });
  });

  describe('DELETE /', async () => {
    it('should delete backup', async () => {
      const filename = await utils.createBackup(admin.accessToken);

      const { status } = await request(app)
        .delete(`/admin/database-backups`)
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({ backups: [filename] });

      expect(status).toBe(200);

      const { status: listStatus, body } = await request(app)
        .get('/admin/database-backups')
        .set('Authorization', `Bearer ${admin.accessToken}`);

      expect(listStatus).toBe(200);
      expect(body).toEqual(
        expect.objectContaining({
          backups: [],
        }),
      );
    });
  });

  // => action: restore database flow

  describe.sequential('POST /start-restore', () => {
    afterAll(async () => {
      await request(app).post('/admin/maintenance').set('cookie', cookie!).send({ action: 'end' });
      await utils.poll(
        () => request(app).get('/server/config'),
        ({ status, body }) => status === 200 && !body.maintenanceMode,
      );

      admin = await utils.adminSetup();
    });

    it.sequential('should not work when the server is configured', async () => {
      const { status, body } = await request(app).post('/admin/database-backups/start-restore').send();

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest('The server already has an admin'));
    });

    it.sequential('should enter maintenance mode in "database restore mode"', async () => {
      await utils.resetDatabase(); // reset database before running this test

      const { status, headers } = await request(app).post('/admin/database-backups/start-restore').send();

      expect(status).toBe(201);

      cookie = headers['set-cookie'][0].split(';')[0];

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toBeTruthy();

      const { status: status2, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
      expect(status2).toBe(200);
      expect(body).toEqual({
        active: true,
        action: 'restore_database',
      });
    });
  });

  // => action: restore database

  describe.sequential('POST /backups/restore', () => {
    beforeAll(async () => {
      await utils.disconnectDatabase();
    });

    afterAll(async () => {
      await utils.connectDatabase();
    });

    it.sequential('should restore a backup', { timeout: 60_000 }, async () => {
      const filename = await utils.createBackup(admin.accessToken);

      const { status } = await request(app)
        .post('/admin/maintenance')
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({
          action: 'restore_database',
          restoreBackupFilename: filename,
        });

      expect(status).toBe(201);

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toBeTruthy();

      const { status: status2, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
      expect(status2).toBe(200);
      expect(body).toEqual(
        expect.objectContaining({
          active: true,
          action: 'restore_database',
        }),
      );

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 60_000,
          },
        )
        .toBeFalsy();
    });

    it.sequential('fail to restore a corrupted backup', { timeout: 60_000 }, async () => {
      await utils.prepareTestBackup('corrupted');

      const { status, headers } = await request(app)
        .post('/admin/maintenance')
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({
          action: 'restore_database',
          restoreBackupFilename: 'development-corrupted.sql.gz',
        });

      expect(status).toBe(201);
      cookie = headers['set-cookie'][0].split(';')[0];

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toBeTruthy();

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
            expect(status).toBe(200);
            return body;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toEqual(
          expect.objectContaining({
            active: true,
            action: 'restore_database',
            error: 'Something went wrong, see logs!',
          }),
        );

      const { status: status2, body: body2 } = await request(app)
        .get('/admin/maintenance/status')
        .set('cookie', cookie!)
        .send({ token: 'token' });
      expect(status2).toBe(200);
      expect(body2).toEqual(
        expect.objectContaining({
          active: true,
          action: 'restore_database',
          error: expect.stringContaining('IM CORRUPTED'),
        }),
      );

      await request(app).post('/admin/maintenance').set('cookie', cookie!).send({
        action: 'end',
      });

      await utils.poll(
        () => request(app).get('/server/config'),
        ({ status, body }) => status === 200 && !body.maintenanceMode,
      );
    });
  });
});
```
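The spec above leans on Vitest's `expect.poll`, which re-runs an async callback and re-applies the matcher until it passes or the timeout elapses. A minimal standalone sketch of the pattern (the URL is a placeholder, not from the diff):

```ts
import { describe, expect, it } from 'vitest';

describe('expect.poll', () => {
  it('retries the callback until the matcher passes or times out', async () => {
    await expect
      .poll(
        // Re-invoked every `interval` ms; the matcher below runs on each result.
        async () => {
          const response = await fetch('http://127.0.0.1:2285/api/server/config'); // placeholder URL
          return response.status;
        },
        { interval: 500, timeout: 10_000 }, // the same values the spec uses
      )
      .toBe(200);
  });
});
```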
```diff
@@ -1006,7 +1006,7 @@ describe('/libraries', () => {
     rmSync(`${testAssetDir}/temp/xmp`, { recursive: true, force: true });
   });

-  it('should switch from using file metadata to file.xmp metadata when asset refreshes', async () => {
+  it('should switch from using file metadata to file.ext.xmp metadata when asset refreshes', async () => {
     const library = await utils.createLibrary(admin.accessToken, {
       ownerId: admin.userId,
       importPaths: [`${testAssetDirInternal}/temp/xmp`],
```
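The retitled test distinguishes the two XMP sidecar naming schemes: `photo.ext.xmp` (extension kept, `.xmp` appended) versus `photo.xmp` (extension replaced). A hypothetical helper, not part of the diff, that spells out both candidates:

```ts
import { extname } from 'node:path';

// Hypothetical: the two sidecar paths a library scan could probe for an asset.
const sidecarCandidates = (assetPath: string): string[] => {
  const ext = extname(assetPath); // e.g. '.jpg'
  return [
    `${assetPath}.xmp`, // file.ext.xmp style: /photos/img.jpg.xmp
    assetPath.slice(0, assetPath.length - ext.length) + '.xmp', // file.xmp style: /photos/img.xmp
  ];
};

// sidecarCandidates('/photos/img.jpg')
// => ['/photos/img.jpg.xmp', '/photos/img.xmp']
```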
```diff
@@ -14,6 +14,7 @@ describe('/admin/maintenance', () => {
     await utils.resetDatabase();
     admin = await utils.adminSetup();
     nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);
+    await utils.resetBackups(admin.accessToken);
   });

   // => outside of maintenance mode
@@ -26,6 +27,17 @@ describe('/admin/maintenance', () => {
     });
   });

+  describe('GET /status', async () => {
+    it('to always indicate we are not in maintenance mode', async () => {
+      const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
+      expect(status).toBe(200);
+      expect(body).toEqual({
+        active: false,
+        action: 'end',
+      });
+    });
+  });
+
   describe('POST /login', async () => {
     it('should not work out of maintenance mode', async () => {
       const { status, body } = await request(app).post('/admin/maintenance/login').send({ token: 'token' });
@@ -39,6 +51,7 @@ describe('/admin/maintenance', () => {
   describe.sequential('POST /', () => {
     it('should require authentication', async () => {
       const { status, body } = await request(app).post('/admin/maintenance').send({
+        active: false,
         action: 'end',
       });
       expect(status).toBe(401);
@@ -69,6 +82,7 @@ describe('/admin/maintenance', () => {
         .send({
           action: 'start',
         });

+      expect(status).toBe(201);
+
       cookie = headers['set-cookie'][0].split(';')[0];
@@ -79,12 +93,13 @@ describe('/admin/maintenance', () => {
       await expect
         .poll(
           async () => {
-            const { body } = await request(app).get('/server/config');
+            const { status, body } = await request(app).get('/server/config');
+            expect(status).toBe(200);
             return body.maintenanceMode;
           },
           {
-            interval: 5e2,
-            timeout: 1e4,
+            interval: 500,
+            timeout: 10_000,
           },
         )
         .toBeTruthy();
@@ -102,6 +117,17 @@ describe('/admin/maintenance', () => {
     });
   });

+  describe('GET /status', async () => {
+    it('to indicate we are in maintenance mode', async () => {
+      const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
+      expect(status).toBe(200);
+      expect(body).toEqual({
+        active: true,
+        action: 'start',
+      });
+    });
+  });
+
   describe('POST /login', async () => {
     it('should fail without cookie or token in body', async () => {
       const { status, body } = await request(app).post('/admin/maintenance/login').send({});
@@ -158,12 +184,13 @@ describe('/admin/maintenance', () => {
       await expect
         .poll(
           async () => {
-            const { body } = await request(app).get('/server/config');
+            const { status, body } = await request(app).get('/server/config');
+            expect(status).toBe(200);
             return body.maintenanceMode;
           },
           {
-            interval: 5e2,
-            timeout: 1e4,
+            interval: 500,
+            timeout: 10_000,
           },
         )
         .toBeFalsy();
```
```diff
@@ -6,7 +6,9 @@ import {
   CheckExistingAssetsDto,
   CreateAlbumDto,
   CreateLibraryDto,
+  JobCreateDto,
+  MaintenanceAction,
   ManualJobName,
   MetadataSearchDto,
   Permission,
   PersonCreateDto,
@@ -21,6 +23,7 @@ import {
   checkExistingAssets,
   createAlbum,
   createApiKey,
+  createJob,
   createLibrary,
   createPartner,
   createPerson,
@@ -28,10 +31,12 @@ import {
   createStack,
   createUserAdmin,
   deleteAssets,
+  deleteDatabaseBackup,
   getAssetInfo,
   getConfig,
   getConfigDefaults,
   getQueuesLegacy,
+  listDatabaseBackups,
   login,
   runQueueCommandLegacy,
   scanLibrary,
@@ -52,11 +57,15 @@ import {
 import { BrowserContext } from '@playwright/test';
 import { exec, spawn } from 'node:child_process';
 import { createHash } from 'node:crypto';
-import { existsSync, mkdirSync, renameSync, rmSync, writeFileSync } from 'node:fs';
+import { createWriteStream, existsSync, mkdirSync, renameSync, rmSync, writeFileSync } from 'node:fs';
+import { mkdtemp } from 'node:fs/promises';
+import { tmpdir } from 'node:os';
-import { dirname, resolve } from 'node:path';
+import { dirname, join, resolve } from 'node:path';
+import { Readable } from 'node:stream';
+import { pipeline } from 'node:stream/promises';
 import { setTimeout as setAsyncTimeout } from 'node:timers/promises';
 import { promisify } from 'node:util';
+import { createGzip } from 'node:zlib';
 import pg from 'pg';
 import { io, type Socket } from 'socket.io-client';
 import { loginDto, signupDto } from 'src/fixtures';
@@ -84,8 +93,9 @@ export const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer $
 export const asKeyAuth = (key: string) => ({ 'x-api-key': key });
 export const immichCli = (args: string[]) =>
   executeCommand('pnpm', ['exec', 'immich', '-d', `/${tempDir}/immich/`, ...args], { cwd: '../cli' }).promise;
-export const immichAdmin = (args: string[]) =>
-  executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', `immich-admin ${args.join(' ')}`]);
+export const dockerExec = (args: string[]) =>
+  executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', args.join(' ')]);
+export const immichAdmin = (args: string[]) => dockerExec([`immich-admin ${args.join(' ')}`]);
 export const specialCharStrings = ["'", '"', ',', '{', '}', '*'];
 export const TEN_TIMES = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
@@ -149,12 +159,26 @@ const onEvent = ({ event, id }: { event: EventType; id: string }) => {
 };

 export const utils = {
+  connectDatabase: async () => {
+    if (!client) {
+      client = new pg.Client(dbUrl);
+      client.on('end', () => (client = null));
+      client.on('error', () => (client = null));
+      await client.connect();
+    }
+
+    return client;
+  },
+
+  disconnectDatabase: async () => {
+    if (client) {
+      await client.end();
+    }
+  },
+
   resetDatabase: async (tables?: string[]) => {
     try {
-      if (!client) {
-        client = new pg.Client(dbUrl);
-        await client.connect();
-      }
+      client = await utils.connectDatabase();

       tables = tables || [
         // TODO e2e test for deleting a stack, since it is quite complex
@@ -481,6 +505,9 @@ export const utils = {
   tagAssets: (accessToken: string, tagId: string, assetIds: string[]) =>
     tagAssets({ id: tagId, bulkIdsDto: { ids: assetIds } }, { headers: asBearerAuth(accessToken) }),

+  createJob: async (accessToken: string, jobCreateDto: JobCreateDto) =>
+    createJob({ jobCreateDto }, { headers: asBearerAuth(accessToken) }),
+
   queueCommand: async (accessToken: string, name: QueueName, queueCommandDto: QueueCommandDto) =>
     runQueueCommandLegacy({ name, queueCommandDto }, { headers: asBearerAuth(accessToken) }),
@@ -559,6 +586,36 @@ export const utils = {
     mkdirSync(`${testAssetDir}/temp`, { recursive: true });
   },

+  createBackup: async (accessToken: string) => {
+    await utils.createJob(accessToken, {
+      name: ManualJobName.BackupDatabase,
+    });
+
+    return await utils.poll(
+      () => request(app).get('/admin/database-backups').set('Authorization', `Bearer ${accessToken}`),
+      ({ status, body }) => status === 200 && body.backups.length === 1,
+      ({ body }) => body.backups[0],
+    );
+  },
+
+  resetBackups: async (accessToken: string) => {
+    const { backups } = await listDatabaseBackups({ headers: asBearerAuth(accessToken) });
+    await deleteDatabaseBackup({ databaseBackupDeleteDto: { backups } }, { headers: asBearerAuth(accessToken) });
+  },
+
+  prepareTestBackup: async (generate: 'corrupted') => {
+    const dir = await mkdtemp(join(tmpdir(), 'test-'));
+    const fn = join(dir, 'file');
+
+    const sql = Readable.from('IM CORRUPTED;');
+    const gzip = createGzip();
+    const writeStream = createWriteStream(fn);
+    await pipeline(sql, gzip, writeStream);
+
+    await executeCommand('docker', ['cp', fn, `immich-e2e-server:/data/backups/development-${generate}.sql.gz`])
+      .promise;
+  },
+
   resetAdminConfig: async (accessToken: string) => {
     const defaultConfig = await getConfigDefaults({ headers: asBearerAuth(accessToken) });
     await updateConfig({ systemConfigDto: defaultConfig }, { headers: asBearerAuth(accessToken) });
@@ -601,6 +658,25 @@ export const utils = {
     await utils.waitForQueueFinish(accessToken, 'sidecar');
     await utils.waitForQueueFinish(accessToken, 'metadataExtraction');
   },

+  async poll<T>(cb: () => Promise<T>, validate: (value: T) => boolean, map?: (value: T) => any) {
+    let timeout = 0;
+    while (true) {
+      try {
+        const data = await cb();
+        if (validate(data)) {
+          return map ? map(data) : data;
+        }
+        timeout++;
+        if (timeout >= 10) {
+          throw 'Could not clean up test.';
+        }
+        await new Promise((resolve) => setTimeout(resolve, 5e2));
+      } catch {
+        // no-op
+      }
+    }
+  },
 };

 utils.initSdk();
```
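One detail worth noting in the new `connectDatabase` helper: the `end` and `error` listeners null out the cached client, so the next call reconnects instead of reusing a dead socket, which matters when the server tears down connections mid-restore. A standalone sketch of that self-healing singleton (the connection string is a placeholder):

```ts
import pg, { type Client } from 'pg';

let client: Client | null = null;

// Self-healing singleton: if the connection ends or errors (e.g. while the
// database is dropped and restored), forget the cached client so the next
// call transparently reconnects.
export const connectDatabase = async (): Promise<Client> => {
  if (!client) {
    client = new pg.Client('postgres://postgres:postgres@127.0.0.1:5432/immich'); // placeholder dbUrl
    client.on('end', () => (client = null));
    client.on('error', () => (client = null));
    await client.connect();
  }
  return client;
};
```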
e2e/src/web/specs/database-backups.e2e-spec.ts (new file, 75 lines)

```ts
import { LoginResponseDto } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import { utils } from 'src/utils';

test.describe.configure({ mode: 'serial' });

test.describe('Database Backups', () => {
  let admin: LoginResponseDto;

  test.beforeAll(async () => {
    utils.initSdk();
    await utils.resetDatabase();
    admin = await utils.adminSetup();
  });

  test('restore a backup from settings', async ({ context, page }) => {
    test.setTimeout(60_000);

    await utils.resetBackups(admin.accessToken);
    await utils.createBackup(admin.accessToken);
    await utils.setAuthCookies(context, admin.accessToken);

    await page.goto('/admin/maintenance?isOpen=backups');
    await page.getByRole('button', { name: 'Restore', exact: true }).click();
    await page.locator('#bits-c2').getByRole('button', { name: 'Restore' }).click();

    await page.waitForURL('/maintenance?**');
    await page.waitForURL('/admin/maintenance**', { timeout: 60_000 });
  });

  test('handle backup restore failure', async ({ context, page }) => {
    test.setTimeout(60_000);

    await utils.resetBackups(admin.accessToken);
    await utils.prepareTestBackup('corrupted');
    await utils.setAuthCookies(context, admin.accessToken);

    await page.goto('/admin/maintenance?isOpen=backups');
    await page.getByRole('button', { name: 'Restore', exact: true }).click();
    await page.locator('#bits-c2').getByRole('button', { name: 'Restore' }).click();

    await page.waitForURL('/maintenance?**');
    await expect(page.getByText('IM CORRUPTED')).toBeVisible({ timeout: 60_000 });
    await page.getByRole('button', { name: 'End maintenance mode' }).click();
    await page.waitForURL('/admin/maintenance**');
  });

  test('restore a backup from onboarding', async ({ context, page }) => {
    test.setTimeout(60_000);

    await utils.resetBackups(admin.accessToken);
    await utils.createBackup(admin.accessToken);
    await utils.setAuthCookies(context, admin.accessToken);
    await utils.resetDatabase();

    await page.goto('/');
    await page.getByRole('button', { name: 'Restore from backup' }).click();

    try {
      await page.waitForURL('/maintenance**');
    } catch {
      // when chained with the rest of the tests
      // this navigation may fail..? not sure why...
      await page.goto('/maintenance');
      await page.waitForURL('/maintenance**');
    }

    await page.getByRole('button', { name: 'Next' }).click();
    await page.getByRole('button', { name: 'Restore', exact: true }).click();
    await page.locator('#bits-c2').getByRole('button', { name: 'Restore' }).click();

    await page.waitForURL('/maintenance?**');
    await page.waitForURL('/photos', { timeout: 60_000 });
  });
});
```
```diff
@@ -16,12 +16,12 @@ test.describe('Maintenance', () => {
   test('enter and exit maintenance mode', async ({ context, page }) => {
     await utils.setAuthCookies(context, admin.accessToken);

-    await page.goto('/admin/system-settings?isOpen=maintenance');
-    await page.getByRole('button', { name: 'Start maintenance mode' }).click();
+    await page.goto('/admin/maintenance');
+    await page.getByRole('button', { name: 'Switch to maintenance mode' }).click();

     await expect(page.getByText('Temporarily Unavailable')).toBeVisible({ timeout: 10_000 });
     await page.getByRole('button', { name: 'End maintenance mode' }).click();
-    await page.waitForURL('**/admin/system-settings*', { timeout: 10_000 });
+    await page.waitForURL('**/admin/maintenance*', { timeout: 10_000 });
   });

   test('maintenance shows no options to users until they authenticate', async ({ page }) => {
```
i18n/en.json (33 changes)

```diff
@@ -182,10 +182,19 @@
   "machine_learning_smart_search_enabled": "Enable smart search",
   "machine_learning_smart_search_enabled_description": "If disabled, images will not be encoded for smart search.",
   "machine_learning_url_description": "The URL of the machine learning server. If more than one URL is provided, each server will be attempted one-at-a-time until one responds successfully, in order from first to last. Servers that don't respond will be temporarily ignored until they come back online.",
+  "maintenance_delete_backup": "Delete Backup",
+  "maintenance_delete_backup_description": "This file will be irrevocably deleted.",
+  "maintenance_delete_error": "Failed to delete backup.",
+  "maintenance_restore_backup": "Restore Backup",
+  "maintenance_restore_backup_description": "Immich will be wiped and restored from the chosen backup. A backup will be created before continuing.",
+  "maintenance_restore_database_backup": "Restore database backup",
+  "maintenance_restore_database_backup_description": "Rollback to an earlier database state using a backup file",
   "maintenance_settings": "Maintenance",
   "maintenance_settings_description": "Put Immich into maintenance mode.",
-  "maintenance_start": "Start maintenance mode",
+  "maintenance_start": "Switch to maintenance mode",
   "maintenance_start_error": "Failed to start maintenance mode.",
+  "maintenance_upload_backup": "Upload database backup file",
+  "maintenance_upload_backup_error": "Could not upload backup, is it an .sql/.sql.gz file?",
   "manage_concurrency": "Manage Concurrency",
   "manage_concurrency_description": "Navigate to the jobs page to manage job concurrency",
   "manage_log_settings": "Manage log settings",
@@ -798,6 +807,12 @@
   "create_user": "Create user",
   "created": "Created",
   "created_at": "Created",
+  "created_day_ago": "Created 1 day ago",
+  "created_days_ago": "Created {count} days ago",
+  "created_hour_ago": "Created 1 hour ago",
+  "created_hours_ago": "Created {count} hours ago",
+  "created_minute_ago": "Created 1 minute ago",
+  "created_minutes_ago": "Created {count} minutes ago",
   "creating_linked_albums": "Creating linked albums...",
   "crop": "Crop",
   "curated_object_page_title": "Things",
@@ -1335,10 +1350,26 @@
   "loop_videos_description": "Enable to automatically loop a video in the detail viewer.",
   "main_branch_warning": "You're using a development version; we strongly recommend using a release version!",
   "main_menu": "Main menu",
+  "maintenance_action_restore": "Restoring Database",
   "maintenance_description": "Immich has been put into <link>maintenance mode</link>.",
   "maintenance_end": "End maintenance mode",
   "maintenance_end_error": "Failed to end maintenance mode.",
   "maintenance_logged_in_as": "Currently logged in as {user}",
+  "maintenance_restore_from_backup": "Restore From Backup",
+  "maintenance_restore_library": "Restore Your Library",
+  "maintenance_restore_library_confirm": "If this looks correct, continue to restoring a backup!",
+  "maintenance_restore_library_description": "Restoring Database",
+  "maintenance_restore_library_folder_has_files": "{folder} has {count} folder(s)",
+  "maintenance_restore_library_folder_no_files": "{folder} is missing files!",
+  "maintenance_restore_library_folder_pass": "readable and writable",
+  "maintenance_restore_library_folder_read_fail": "not readable",
+  "maintenance_restore_library_folder_write_fail": "not writable",
+  "maintenance_restore_library_hint_missing_files": "You may be missing important files",
+  "maintenance_restore_library_hint_regenerate_later": "You can regenerate these later in settings",
+  "maintenance_restore_library_hint_storage_template_missing_files": "Using storage template? You may be missing files",
+  "maintenance_restore_library_loading": "Loading integrity checks and heuristics…",
+  "maintenance_task_backup": "Creating a backup of the existing database…",
+  "maintenance_task_restore": "Restoring the chosen backup…",
   "maintenance_title": "Temporarily Unavailable",
   "make": "Make",
   "manage_geolocation": "Manage location",
```
```diff
@@ -3,7 +3,7 @@ experimental_monorepo_root = true
 [tools]
 node = "24.11.1"
 flutter = "3.35.7"
-pnpm = "10.22.0"
+pnpm = "10.24.0"
 terragrunt = "0.93.10"
 opentofu = "1.10.7"
 java = "25.0.1"
```
```diff
@@ -1,5 +1,5 @@
 import 'package:immich_mobile/entities/asset.entity.dart';
-import 'package:timezone/timezone.dart';
+import 'package:immich_mobile/utils/timezone.dart';

 extension TZExtension on Asset {
   /// Returns the created time of the asset from the exif info (if available) or from
@@ -7,24 +7,11 @@ extension TZExtension on Asset {
   /// the timezone offset in [Duration]
   (DateTime, Duration) getTZAdjustedTimeAndOffset() {
     DateTime dt = fileCreatedAt.toLocal();

     if (exifInfo?.dateTimeOriginal != null) {
-      dt = exifInfo!.dateTimeOriginal!;
-      if (exifInfo?.timeZone != null) {
-        dt = dt.toUtc();
-        try {
-          final location = getLocation(exifInfo!.timeZone!);
-          dt = TZDateTime.from(dt, location);
-        } on LocationNotFoundException {
-          RegExp re = RegExp(r'^utc(?:([+-]\d{1,2})(?::(\d{2}))?)?$', caseSensitive: false);
-          final m = re.firstMatch(exifInfo!.timeZone!);
-          if (m != null) {
-            final duration = Duration(hours: int.parse(m.group(1) ?? '0'), minutes: int.parse(m.group(2) ?? '0'));
-            dt = dt.add(duration);
-            return (dt, duration);
-          }
-        }
-      }
+      return applyTimezoneOffset(dateTime: exifInfo!.dateTimeOriginal!, timeZone: exifInfo?.timeZone);
     }

     return (dt, dt.timeZoneOffset);
   }
 }
```
```diff
@@ -109,7 +109,7 @@ class AddActionButton extends ConsumerWidget {
     final size = renderObject.size;
     final position = renderObject.localToGlobal(Offset.zero);

-    return RelativeRect.fromLTRB(position.dx, position.dy - size.height - 225, position.dx + size.width, position.dy);
+    return RelativeRect.fromLTRB(position.dx, position.dy - size.height - 200, position.dx + size.width, position.dy);
   }

   void _openAlbumSelector(BuildContext context, WidgetRef ref) {
```
```diff
@@ -10,6 +10,7 @@ import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
 import 'package:immich_mobile/domain/models/exif.model.dart';
 import 'package:immich_mobile/domain/models/setting.model.dart';
 import 'package:immich_mobile/extensions/build_context_extensions.dart';
+import 'package:immich_mobile/extensions/duration_extensions.dart';
 import 'package:immich_mobile/extensions/translate_extensions.dart';
 import 'package:immich_mobile/presentation/widgets/album/album_tile.dart';
 import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
@@ -29,6 +30,7 @@ import 'package:immich_mobile/repositories/asset_media.repository.dart';
 import 'package:immich_mobile/routing/router.dart';
 import 'package:immich_mobile/utils/action_button.utils.dart';
 import 'package:immich_mobile/utils/bytes_units.dart';
+import 'package:immich_mobile/utils/timezone.dart';
 import 'package:immich_mobile/widgets/common/immich_toast.dart';

 const _kSeparator = ' • ';
@@ -85,13 +87,21 @@ class AssetDetailBottomSheet extends ConsumerWidget {
 class _AssetDetailBottomSheet extends ConsumerWidget {
   const _AssetDetailBottomSheet();

-  String _getDateTime(BuildContext ctx, BaseAsset asset) {
-    final dateTime = asset.createdAt.toLocal();
+  String _getDateTime(BuildContext ctx, BaseAsset asset, ExifInfo? exifInfo) {
+    DateTime dateTime = asset.createdAt.toLocal();
+    Duration timeZoneOffset = dateTime.timeZoneOffset;
+
+    // Use EXIF timezone information if available (matching web app behavior)
+    if (exifInfo?.dateTimeOriginal != null) {
+      (dateTime, timeZoneOffset) = applyTimezoneOffset(
+        dateTime: exifInfo!.dateTimeOriginal!,
+        timeZone: exifInfo.timeZone,
+      );
+    }
+
     final date = DateFormat.yMMMEd(ctx.locale.toLanguageTag()).format(dateTime);
     final time = DateFormat.jm(ctx.locale.toLanguageTag()).format(dateTime);
-    final timezone = dateTime.timeZoneOffset.isNegative
-        ? 'UTC-${dateTime.timeZoneOffset.inHours.abs().toString().padLeft(2, '0')}:${(dateTime.timeZoneOffset.inMinutes.abs() % 60).toString().padLeft(2, '0')}'
-        : 'UTC+${dateTime.timeZoneOffset.inHours.toString().padLeft(2, '0')}:${(dateTime.timeZoneOffset.inMinutes.abs() % 60).toString().padLeft(2, '0')}';
+    final timezone = 'GMT${timeZoneOffset.formatAsOffset()}';
     return '$date$_kSeparator$time $timezone';
   }
@@ -269,7 +279,7 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
       children: [
         // Asset Date and Time
         SheetTile(
-          title: _getDateTime(context, asset),
+          title: _getDateTime(context, asset, exifInfo),
           titleStyle: context.textTheme.bodyMedium?.copyWith(fontWeight: FontWeight.w600),
           trailing: asset.hasRemote && isOwner ? const Icon(Icons.edit, size: 18) : null,
           onTap: asset.hasRemote && isOwner ? () async => await _editDateTime(context, ref) : null,
@@ -15,6 +15,7 @@ import 'package:immich_mobile/repositories/asset_media.repository.dart';
 import 'package:immich_mobile/repositories/download.repository.dart';
 import 'package:immich_mobile/repositories/drift_album_api_repository.dart';
 import 'package:immich_mobile/routing/router.dart';
+import 'package:immich_mobile/utils/timezone.dart';
 import 'package:immich_mobile/widgets/common/date_time_picker.dart';
 import 'package:immich_mobile/widgets/common/location_picker.dart';
 import 'package:maplibre_gl/maplibre_gl.dart' as maplibre;
@@ -175,9 +176,17 @@ class ActionService {
     }

     final exifData = await _remoteAssetRepository.getExif(assetId);
-    initialDate = asset.createdAt.toLocal();
-    offset = initialDate.timeZoneOffset;
-    timeZone = exifData?.timeZone;
+
+    // Use EXIF timezone information if available (matching web app and display behavior)
+    DateTime dt = asset.createdAt.toLocal();
+    offset = dt.timeZoneOffset;
+
+    if (exifData?.dateTimeOriginal != null) {
+      timeZone = exifData!.timeZone;
+      (dt, offset) = applyTimezoneOffset(dateTime: exifData.dateTimeOriginal!, timeZone: exifData.timeZone);
+    }
+
+    initialDate = dt;
   }

   final dateTime = await showDateTimePicker(
```
mobile/lib/utils/timezone.dart (new file, 35 lines)

```dart
import 'package:timezone/timezone.dart';

/// Applies timezone conversion to a DateTime using EXIF timezone information.
///
/// This function handles two timezone formats:
/// 1. Named timezone locations (e.g., "Asia/Hong_Kong")
/// 2. UTC offset format (e.g., "UTC+08:00", "UTC-05:00")
///
/// Returns a tuple of (adjusted DateTime, timezone offset Duration)
(DateTime, Duration) applyTimezoneOffset({required DateTime dateTime, required String? timeZone}) {
  DateTime dt = dateTime.toUtc();

  if (timeZone == null) {
    return (dt, dt.timeZoneOffset);
  }

  try {
    // Try to get timezone location from database
    final location = getLocation(timeZone);
    dt = TZDateTime.from(dt, location);
    return (dt, dt.timeZoneOffset);
  } on LocationNotFoundException {
    // Handle UTC offset format (e.g., "UTC+08:00")
    RegExp re = RegExp(r'^utc(?:([+-]\d{1,2})(?::(\d{2}))?)?$', caseSensitive: false);
    final m = re.firstMatch(timeZone);
    if (m != null) {
      final duration = Duration(hours: int.parse(m.group(1) ?? '0'), minutes: int.parse(m.group(2) ?? '0'));
      dt = dt.add(duration);
      return (dt, duration);
    }
  }

  // If timezone is invalid, return UTC
  return (dt, dt.timeZoneOffset);
}
```
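The `UTC±HH:MM` fallback above hinges on one regex. A TypeScript rendering of the same parse, for reference only (unlike the Dart `Duration` construction, this sketch explicitly gives the minutes the same sign as the hours, which is an assumption about intent):

```ts
// Sketch of the UTC-offset fallback. Named zones ("Asia/Hong_Kong") would go
// through a tz database; this covers only the "UTC+08:00" style.
const UTC_OFFSET_RE = /^utc(?:([+-]\d{1,2})(?::(\d{2}))?)?$/i;

function parseUtcOffsetMinutes(timeZone: string): number | null {
  const m = UTC_OFFSET_RE.exec(timeZone);
  if (!m) return null; // not a UTC-offset string
  const hours = Number(m[1] ?? 0);
  const minutes = Number(m[2] ?? 0);
  // Assumption: "UTC-05:30" means -330 minutes, so negate minutes with hours.
  return hours * 60 + (hours < 0 ? -minutes : minutes);
}

// parseUtcOffsetMinutes('UTC+08:00')  === 480
// parseUtcOffsetMinutes('utc-5')      === -300
// parseUtcOffsetMinutes('UTC')        === 0
// parseUtcOffsetMinutes('Asia/Tokyo') === null
```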
mobile/openapi/README.md (generated, 13 changes)

```diff
@@ -133,6 +133,11 @@ Class | Method | HTTP request | Description
 *AuthenticationApi* | [**unlockAuthSession**](doc//AuthenticationApi.md#unlockauthsession) | **POST** /auth/session/unlock | Unlock auth session
 *AuthenticationApi* | [**validateAccessToken**](doc//AuthenticationApi.md#validateaccesstoken) | **POST** /auth/validateToken | Validate access token
 *AuthenticationAdminApi* | [**unlinkAllOAuthAccountsAdmin**](doc//AuthenticationAdminApi.md#unlinkalloauthaccountsadmin) | **POST** /admin/auth/unlink-all | Unlink all OAuth accounts
+*DatabaseBackupsAdminApi* | [**deleteDatabaseBackup**](doc//DatabaseBackupsAdminApi.md#deletedatabasebackup) | **DELETE** /admin/database-backups | Delete database backup
+*DatabaseBackupsAdminApi* | [**downloadDatabaseBackup**](doc//DatabaseBackupsAdminApi.md#downloaddatabasebackup) | **GET** /admin/database-backups/{filename} | Download database backup
+*DatabaseBackupsAdminApi* | [**listDatabaseBackups**](doc//DatabaseBackupsAdminApi.md#listdatabasebackups) | **GET** /admin/database-backups | List database backups
+*DatabaseBackupsAdminApi* | [**startDatabaseRestoreFlow**](doc//DatabaseBackupsAdminApi.md#startdatabaserestoreflow) | **POST** /admin/database-backups/start-restore | Start database backup restore flow
+*DatabaseBackupsAdminApi* | [**uploadDatabaseBackup**](doc//DatabaseBackupsAdminApi.md#uploaddatabasebackup) | **POST** /admin/database-backups/upload | Upload database backup
 *DeprecatedApi* | [**createPartnerDeprecated**](doc//DeprecatedApi.md#createpartnerdeprecated) | **POST** /partners/{id} | Create a partner
 *DeprecatedApi* | [**getAllUserAssetsByDeviceId**](doc//DeprecatedApi.md#getalluserassetsbydeviceid) | **GET** /assets/device/{deviceId} | Retrieve assets by device ID
 *DeprecatedApi* | [**getDeltaSync**](doc//DeprecatedApi.md#getdeltasync) | **POST** /sync/delta-sync | Get delta sync for user
@@ -161,6 +166,8 @@ Class | Method | HTTP request | Description
 *LibrariesApi* | [**scanLibrary**](doc//LibrariesApi.md#scanlibrary) | **POST** /libraries/{id}/scan | Scan a library
 *LibrariesApi* | [**updateLibrary**](doc//LibrariesApi.md#updatelibrary) | **PUT** /libraries/{id} | Update a library
 *LibrariesApi* | [**validate**](doc//LibrariesApi.md#validate) | **POST** /libraries/{id}/validate | Validate library settings
+*MaintenanceAdminApi* | [**detectPriorInstall**](doc//MaintenanceAdminApi.md#detectpriorinstall) | **GET** /admin/maintenance/detect-install | Detect existing install
+*MaintenanceAdminApi* | [**getMaintenanceStatus**](doc//MaintenanceAdminApi.md#getmaintenancestatus) | **GET** /admin/maintenance/status | Get maintenance mode status
 *MaintenanceAdminApi* | [**maintenanceLogin**](doc//MaintenanceAdminApi.md#maintenancelogin) | **POST** /admin/maintenance/login | Log into maintenance mode
 *MaintenanceAdminApi* | [**setMaintenanceMode**](doc//MaintenanceAdminApi.md#setmaintenancemode) | **POST** /admin/maintenance | Set maintenance mode
 *MapApi* | [**getMapMarkers**](doc//MapApi.md#getmapmarkers) | **GET** /map/markers | Retrieve map markers
@@ -387,6 +394,8 @@ Class | Method | HTTP request | Description
 - [CreateLibraryDto](doc//CreateLibraryDto.md)
 - [CreateProfileImageResponseDto](doc//CreateProfileImageResponseDto.md)
 - [DatabaseBackupConfig](doc//DatabaseBackupConfig.md)
+- [DatabaseBackupDeleteDto](doc//DatabaseBackupDeleteDto.md)
+- [DatabaseBackupListResponseDto](doc//DatabaseBackupListResponseDto.md)
 - [DownloadArchiveInfo](doc//DownloadArchiveInfo.md)
 - [DownloadInfoDto](doc//DownloadInfoDto.md)
 - [DownloadResponse](doc//DownloadResponse.md)
@@ -416,7 +425,10 @@ Class | Method | HTTP request | Description
 - [MachineLearningAvailabilityChecksDto](doc//MachineLearningAvailabilityChecksDto.md)
 - [MaintenanceAction](doc//MaintenanceAction.md)
 - [MaintenanceAuthDto](doc//MaintenanceAuthDto.md)
+- [MaintenanceDetectInstallResponseDto](doc//MaintenanceDetectInstallResponseDto.md)
+- [MaintenanceDetectInstallStorageFolderDto](doc//MaintenanceDetectInstallStorageFolderDto.md)
 - [MaintenanceLoginDto](doc//MaintenanceLoginDto.md)
+- [MaintenanceStatusResponseDto](doc//MaintenanceStatusResponseDto.md)
 - [ManualJobName](doc//ManualJobName.md)
 - [MapMarkerResponseDto](doc//MapMarkerResponseDto.md)
 - [MapReverseGeocodeResponseDto](doc//MapReverseGeocodeResponseDto.md)
@@ -528,6 +540,7 @@ Class | Method | HTTP request | Description
 - [StackResponseDto](doc//StackResponseDto.md)
 - [StackUpdateDto](doc//StackUpdateDto.md)
 - [StatisticsSearchDto](doc//StatisticsSearchDto.md)
+- [StorageFolder](doc//StorageFolder.md)
 - [SyncAckDeleteDto](doc//SyncAckDeleteDto.md)
 - [SyncAckDto](doc//SyncAckDto.md)
 - [SyncAckSetDto](doc//SyncAckSetDto.md)
```
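The TypeScript SDK exposes the same endpoints, and the e2e utils above already call them. A minimal sketch for reference (function names and call shapes match the `@immich/sdk` imports in the utils diff; the token is a placeholder):

```ts
import { deleteDatabaseBackup, listDatabaseBackups } from '@immich/sdk';

const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer ${accessToken}` });

// List the server's database backups, then delete them all: the same two
// calls the new resetBackups() e2e helper makes, shown standalone.
async function wipeBackups(accessToken: string): Promise<void> {
  const { backups } = await listDatabaseBackups({ headers: asBearerAuth(accessToken) });
  await deleteDatabaseBackup({ databaseBackupDeleteDto: { backups } }, { headers: asBearerAuth(accessToken) });
}
```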
mobile/openapi/lib/api.dart (generated, 7 changes)

```diff
@@ -36,6 +36,7 @@ part 'api/albums_api.dart';
 part 'api/assets_api.dart';
 part 'api/authentication_api.dart';
 part 'api/authentication_admin_api.dart';
+part 'api/database_backups_admin_api.dart';
 part 'api/deprecated_api.dart';
 part 'api/download_api.dart';
 part 'api/duplicates_api.dart';
@@ -139,6 +140,8 @@ part 'model/create_album_dto.dart';
 part 'model/create_library_dto.dart';
 part 'model/create_profile_image_response_dto.dart';
 part 'model/database_backup_config.dart';
+part 'model/database_backup_delete_dto.dart';
+part 'model/database_backup_list_response_dto.dart';
 part 'model/download_archive_info.dart';
 part 'model/download_info_dto.dart';
 part 'model/download_response.dart';
@@ -168,7 +171,10 @@ part 'model/logout_response_dto.dart';
 part 'model/machine_learning_availability_checks_dto.dart';
 part 'model/maintenance_action.dart';
 part 'model/maintenance_auth_dto.dart';
+part 'model/maintenance_detect_install_response_dto.dart';
+part 'model/maintenance_detect_install_storage_folder_dto.dart';
 part 'model/maintenance_login_dto.dart';
+part 'model/maintenance_status_response_dto.dart';
 part 'model/manual_job_name.dart';
 part 'model/map_marker_response_dto.dart';
 part 'model/map_reverse_geocode_response_dto.dart';
@@ -280,6 +286,7 @@ part 'model/stack_create_dto.dart';
 part 'model/stack_response_dto.dart';
 part 'model/stack_update_dto.dart';
 part 'model/statistics_search_dto.dart';
+part 'model/storage_folder.dart';
 part 'model/sync_ack_delete_dto.dart';
 part 'model/sync_ack_dto.dart';
 part 'model/sync_ack_set_dto.dart';
```
269
mobile/openapi/lib/api/database_backups_admin_api.dart
generated
Normal file
269
mobile/openapi/lib/api/database_backups_admin_api.dart
generated
Normal file
@@ -0,0 +1,269 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
|
||||
class DatabaseBackupsAdminApi {
|
||||
DatabaseBackupsAdminApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
|
||||
|
||||
final ApiClient apiClient;
|
||||
|
||||
/// Delete database backup
|
||||
///
|
||||
/// Delete a backup by its filename
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [DatabaseBackupDeleteDto] databaseBackupDeleteDto (required):
|
||||
Future<Response> deleteDatabaseBackupWithHttpInfo(DatabaseBackupDeleteDto databaseBackupDeleteDto,) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody = databaseBackupDeleteDto;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>['application/json'];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'DELETE',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Delete database backup
|
||||
///
|
||||
/// Delete a backup by its filename
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [DatabaseBackupDeleteDto] databaseBackupDeleteDto (required):
|
||||
Future<void> deleteDatabaseBackup(DatabaseBackupDeleteDto databaseBackupDeleteDto,) async {
|
||||
final response = await deleteDatabaseBackupWithHttpInfo(databaseBackupDeleteDto,);
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
}
|
||||
|
||||
/// Download database backup
|
||||
///
|
||||
/// Downloads the database backup file
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [String] filename (required):
|
||||
Future<Response> downloadDatabaseBackupWithHttpInfo(String filename,) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups/{filename}'
|
||||
.replaceAll('{filename}', filename);
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Download database backup
|
||||
///
|
||||
/// Downloads the database backup file
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [String] filename (required):
|
||||
Future<MultipartFile?> downloadDatabaseBackup(String filename,) async {
|
||||
final response = await downloadDatabaseBackupWithHttpInfo(filename,);
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MultipartFile',) as MultipartFile;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// List database backups
|
||||
///
|
||||
/// Get the list of the successful and failed backups
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
Future<Response> listDatabaseBackupsWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// List database backups
|
||||
///
|
||||
  /// Get the list of the successful and failed backups
  Future<DatabaseBackupListResponseDto?> listDatabaseBackups() async {
    final response = await listDatabaseBackupsWithHttpInfo();
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
    // When a remote server returns no body with a status of 204, we shall not decode it.
    // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
    // FormatException when trying to decode an empty string.
    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
      return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'DatabaseBackupListResponseDto',) as DatabaseBackupListResponseDto;

    }
    return null;
  }

  /// Start database backup restore flow
  ///
  /// Put Immich into maintenance mode to restore a backup (Immich must not be configured)
  ///
  /// Note: This method returns the HTTP [Response].
  Future<Response> startDatabaseRestoreFlowWithHttpInfo() async {
    // ignore: prefer_const_declarations
    final apiPath = r'/admin/database-backups/start-restore';

    // ignore: prefer_final_locals
    Object? postBody;

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};

    const contentTypes = <String>[];


    return apiClient.invokeAPI(
      apiPath,
      'POST',
      queryParams,
      postBody,
      headerParams,
      formParams,
      contentTypes.isEmpty ? null : contentTypes.first,
    );
  }

  /// Start database backup restore flow
  ///
  /// Put Immich into maintenance mode to restore a backup (Immich must not be configured)
  Future<void> startDatabaseRestoreFlow() async {
    final response = await startDatabaseRestoreFlowWithHttpInfo();
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
  }

  /// Upload database backup
  ///
  /// Uploads .sql/.sql.gz file to restore backup from
  ///
  /// Note: This method returns the HTTP [Response].
  ///
  /// Parameters:
  ///
  /// * [MultipartFile] file:
  Future<Response> uploadDatabaseBackupWithHttpInfo({ MultipartFile? file, }) async {
    // ignore: prefer_const_declarations
    final apiPath = r'/admin/database-backups/upload';

    // ignore: prefer_final_locals
    Object? postBody;

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};

    const contentTypes = <String>['multipart/form-data'];

    bool hasFields = false;
    final mp = MultipartRequest('POST', Uri.parse(apiPath));
    if (file != null) {
      hasFields = true;
      mp.fields[r'file'] = file.field;
      mp.files.add(file);
    }
    if (hasFields) {
      postBody = mp;
    }

    return apiClient.invokeAPI(
      apiPath,
      'POST',
      queryParams,
      postBody,
      headerParams,
      formParams,
      contentTypes.isEmpty ? null : contentTypes.first,
    );
  }

  /// Upload database backup
  ///
  /// Uploads .sql/.sql.gz file to restore backup from
  ///
  /// Parameters:
  ///
  /// * [MultipartFile] file:
  Future<void> uploadDatabaseBackup({ MultipartFile? file, }) async {
    final response = await uploadDatabaseBackupWithHttpInfo( file: file, );
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
  }
}
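For orientation, a minimal usage sketch of the endpoints above (not part of the diff). It assumes the enclosing generated class is named DatabaseBackupsAdminApi, per the generator's tag-based naming for "Database Backups (admin)", and that defaultApiClient has already been authenticated.

// Hypothetical usage sketch; the class name `DatabaseBackupsAdminApi` is an
// assumption, and `defaultApiClient` must already carry admin credentials.
import 'package:openapi/api.dart';

Future<void> beginRestoreFromExistingBackup() async {
  final api = DatabaseBackupsAdminApi(defaultApiClient);

  // List successful and failed backups (the DTO carries filenames only).
  final list = await api.listDatabaseBackups();
  final backups = list?.backups ?? const <String>[];
  if (backups.isEmpty) {
    return; // nothing to restore
  }

  // Put the (unconfigured) server into maintenance mode for the restore flow.
  await api.startDatabaseRestoreFlow();
}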
mobile/openapi/lib/api/maintenance_admin_api.dart (generated, 96 changes)
@@ -16,6 +16,102 @@ class MaintenanceAdminApi {

  final ApiClient apiClient;

  /// Detect existing install
  ///
  /// Collect integrity checks and other heuristics about local data.
  ///
  /// Note: This method returns the HTTP [Response].
  Future<Response> detectPriorInstallWithHttpInfo() async {
    // ignore: prefer_const_declarations
    final apiPath = r'/admin/maintenance/detect-install';

    // ignore: prefer_final_locals
    Object? postBody;

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};

    const contentTypes = <String>[];


    return apiClient.invokeAPI(
      apiPath,
      'GET',
      queryParams,
      postBody,
      headerParams,
      formParams,
      contentTypes.isEmpty ? null : contentTypes.first,
    );
  }

  /// Detect existing install
  ///
  /// Collect integrity checks and other heuristics about local data.
  Future<MaintenanceDetectInstallResponseDto?> detectPriorInstall() async {
    final response = await detectPriorInstallWithHttpInfo();
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
    // When a remote server returns no body with a status of 204, we shall not decode it.
    // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
    // FormatException when trying to decode an empty string.
    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
      return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MaintenanceDetectInstallResponseDto',) as MaintenanceDetectInstallResponseDto;

    }
    return null;
  }

  /// Get maintenance mode status
  ///
  /// Fetch information about the currently running maintenance action.
  ///
  /// Note: This method returns the HTTP [Response].
  Future<Response> getMaintenanceStatusWithHttpInfo() async {
    // ignore: prefer_const_declarations
    final apiPath = r'/admin/maintenance/status';

    // ignore: prefer_final_locals
    Object? postBody;

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};

    const contentTypes = <String>[];


    return apiClient.invokeAPI(
      apiPath,
      'GET',
      queryParams,
      postBody,
      headerParams,
      formParams,
      contentTypes.isEmpty ? null : contentTypes.first,
    );
  }

  /// Get maintenance mode status
  ///
  /// Fetch information about the currently running maintenance action.
  Future<MaintenanceStatusResponseDto?> getMaintenanceStatus() async {
    final response = await getMaintenanceStatusWithHttpInfo();
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
    // When a remote server returns no body with a status of 204, we shall not decode it.
    // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
    // FormatException when trying to decode an empty string.
    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
      return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MaintenanceStatusResponseDto',) as MaintenanceStatusResponseDto;

    }
    return null;
  }

  /// Log into maintenance mode
  ///
  /// Login with maintenance token or cookie to receive current information and perform further actions.
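A small polling sketch built on the two status methods above; the MaintenanceAdminApi class name and the DTO fields come from this diff, while the poll interval and error handling are illustrative only.

import 'package:openapi/api.dart';

Future<void> waitForMaintenanceAction() async {
  final api = MaintenanceAdminApi(defaultApiClient);
  while (true) {
    final status = await api.getMaintenanceStatus();
    if (status == null || !status.active) {
      return; // no maintenance action is currently running
    }
    if (status.error != null) {
      throw StateError('Maintenance action failed: ${status.error}');
    }
    // `task` and `progress` are nullable on MaintenanceStatusResponseDto.
    print('${status.action}: ${status.task ?? 'working'} (${status.progress ?? 0}%)');
    await Future<void>.delayed(const Duration(seconds: 2));
  }
}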
mobile/openapi/lib/api_client.dart (generated, 12 changes)
@@ -326,6 +326,10 @@ class ApiClient {
        return CreateProfileImageResponseDto.fromJson(value);
      case 'DatabaseBackupConfig':
        return DatabaseBackupConfig.fromJson(value);
      case 'DatabaseBackupDeleteDto':
        return DatabaseBackupDeleteDto.fromJson(value);
      case 'DatabaseBackupListResponseDto':
        return DatabaseBackupListResponseDto.fromJson(value);
      case 'DownloadArchiveInfo':
        return DownloadArchiveInfo.fromJson(value);
      case 'DownloadInfoDto':
@@ -384,8 +388,14 @@ class ApiClient {
        return MaintenanceActionTypeTransformer().decode(value);
      case 'MaintenanceAuthDto':
        return MaintenanceAuthDto.fromJson(value);
      case 'MaintenanceDetectInstallResponseDto':
        return MaintenanceDetectInstallResponseDto.fromJson(value);
      case 'MaintenanceDetectInstallStorageFolderDto':
        return MaintenanceDetectInstallStorageFolderDto.fromJson(value);
      case 'MaintenanceLoginDto':
        return MaintenanceLoginDto.fromJson(value);
      case 'MaintenanceStatusResponseDto':
        return MaintenanceStatusResponseDto.fromJson(value);
      case 'ManualJobName':
        return ManualJobNameTypeTransformer().decode(value);
      case 'MapMarkerResponseDto':
@@ -608,6 +618,8 @@ class ApiClient {
        return StackUpdateDto.fromJson(value);
      case 'StatisticsSearchDto':
        return StatisticsSearchDto.fromJson(value);
      case 'StorageFolder':
        return StorageFolderTypeTransformer().decode(value);
      case 'SyncAckDeleteDto':
        return SyncAckDeleteDto.fromJson(value);
      case 'SyncAckDto':
mobile/openapi/lib/api_helper.dart (generated, 3 changes)
@@ -157,6 +157,9 @@ String parameterToString(dynamic value) {
  if (value is SourceType) {
    return SourceTypeTypeTransformer().encode(value).toString();
  }
  if (value is StorageFolder) {
    return StorageFolderTypeTransformer().encode(value).toString();
  }
  if (value is SyncEntityType) {
    return SyncEntityTypeTypeTransformer().encode(value).toString();
  }
mobile/openapi/lib/model/database_backup_delete_dto.dart (generated, new file, 101 lines)
@@ -0,0 +1,101 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class DatabaseBackupDeleteDto {
  /// Returns a new [DatabaseBackupDeleteDto] instance.
  DatabaseBackupDeleteDto({
    this.backups = const [],
  });

  List<String> backups;

  @override
  bool operator ==(Object other) => identical(this, other) || other is DatabaseBackupDeleteDto &&
    _deepEquality.equals(other.backups, backups);

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (backups.hashCode);

  @override
  String toString() => 'DatabaseBackupDeleteDto[backups=$backups]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'backups'] = this.backups;
    return json;
  }

  /// Returns a new [DatabaseBackupDeleteDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static DatabaseBackupDeleteDto? fromJson(dynamic value) {
    upgradeDto(value, "DatabaseBackupDeleteDto");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return DatabaseBackupDeleteDto(
        backups: json[r'backups'] is Iterable
            ? (json[r'backups'] as Iterable).cast<String>().toList(growable: false)
            : const [],
      );
    }
    return null;
  }

  static List<DatabaseBackupDeleteDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <DatabaseBackupDeleteDto>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = DatabaseBackupDeleteDto.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, DatabaseBackupDeleteDto> mapFromJson(dynamic json) {
    final map = <String, DatabaseBackupDeleteDto>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = DatabaseBackupDeleteDto.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of DatabaseBackupDeleteDto-objects as value to a dart map
  static Map<String, List<DatabaseBackupDeleteDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<DatabaseBackupDeleteDto>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = DatabaseBackupDeleteDto.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'backups',
  };
}
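A short round-trip sketch for the DTO above (the backup filename is made up). Note that fromJson tolerates a missing or non-iterable backups key by falling back to an empty list.

// Round-trip sketch; the filename below is hypothetical.
final dto = DatabaseBackupDeleteDto(backups: ['immich-db-backup-123.sql.gz']);
final json = dto.toJson();
final parsed = DatabaseBackupDeleteDto.fromJson(json);
assert(parsed != null && parsed.backups.length == 1);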
mobile/openapi/lib/model/database_backup_list_response_dto.dart (generated, new file, 101 lines)
@@ -0,0 +1,101 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class DatabaseBackupListResponseDto {
  /// Returns a new [DatabaseBackupListResponseDto] instance.
  DatabaseBackupListResponseDto({
    this.backups = const [],
  });

  List<String> backups;

  @override
  bool operator ==(Object other) => identical(this, other) || other is DatabaseBackupListResponseDto &&
    _deepEquality.equals(other.backups, backups);

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (backups.hashCode);

  @override
  String toString() => 'DatabaseBackupListResponseDto[backups=$backups]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'backups'] = this.backups;
    return json;
  }

  /// Returns a new [DatabaseBackupListResponseDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static DatabaseBackupListResponseDto? fromJson(dynamic value) {
    upgradeDto(value, "DatabaseBackupListResponseDto");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return DatabaseBackupListResponseDto(
        backups: json[r'backups'] is Iterable
            ? (json[r'backups'] as Iterable).cast<String>().toList(growable: false)
            : const [],
      );
    }
    return null;
  }

  static List<DatabaseBackupListResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <DatabaseBackupListResponseDto>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = DatabaseBackupListResponseDto.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, DatabaseBackupListResponseDto> mapFromJson(dynamic json) {
    final map = <String, DatabaseBackupListResponseDto>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = DatabaseBackupListResponseDto.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of DatabaseBackupListResponseDto-objects as value to a dart map
  static Map<String, List<DatabaseBackupListResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<DatabaseBackupListResponseDto>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = DatabaseBackupListResponseDto.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'backups',
  };
}
mobile/openapi/lib/model/maintenance_action.dart (generated, 3 changes)
@@ -25,11 +25,13 @@ class MaintenanceAction {

  static const start = MaintenanceAction._(r'start');
  static const end = MaintenanceAction._(r'end');
  static const restoreDatabase = MaintenanceAction._(r'restore_database');

  /// List of all possible values in this [enum][MaintenanceAction].
  static const values = <MaintenanceAction>[
    start,
    end,
    restoreDatabase,
  ];

  static MaintenanceAction? fromJson(dynamic value) => MaintenanceActionTypeTransformer().decode(value);
@@ -70,6 +72,7 @@ class MaintenanceActionTypeTransformer {
      switch (data) {
        case r'start': return MaintenanceAction.start;
        case r'end': return MaintenanceAction.end;
        case r'restore_database': return MaintenanceAction.restoreDatabase;
        default:
          if (!allowNull) {
            throw ArgumentError('Unknown enum value to decode: $data');
mobile/openapi/lib/model/maintenance_detect_install_response_dto.dart (generated, new file, 99 lines)
@@ -0,0 +1,99 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class MaintenanceDetectInstallResponseDto {
  /// Returns a new [MaintenanceDetectInstallResponseDto] instance.
  MaintenanceDetectInstallResponseDto({
    this.storage = const [],
  });

  List<MaintenanceDetectInstallStorageFolderDto> storage;

  @override
  bool operator ==(Object other) => identical(this, other) || other is MaintenanceDetectInstallResponseDto &&
    _deepEquality.equals(other.storage, storage);

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (storage.hashCode);

  @override
  String toString() => 'MaintenanceDetectInstallResponseDto[storage=$storage]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'storage'] = this.storage;
    return json;
  }

  /// Returns a new [MaintenanceDetectInstallResponseDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static MaintenanceDetectInstallResponseDto? fromJson(dynamic value) {
    upgradeDto(value, "MaintenanceDetectInstallResponseDto");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return MaintenanceDetectInstallResponseDto(
        storage: MaintenanceDetectInstallStorageFolderDto.listFromJson(json[r'storage']),
      );
    }
    return null;
  }

  static List<MaintenanceDetectInstallResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <MaintenanceDetectInstallResponseDto>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = MaintenanceDetectInstallResponseDto.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, MaintenanceDetectInstallResponseDto> mapFromJson(dynamic json) {
    final map = <String, MaintenanceDetectInstallResponseDto>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = MaintenanceDetectInstallResponseDto.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of MaintenanceDetectInstallResponseDto-objects as value to a dart map
  static Map<String, List<MaintenanceDetectInstallResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<MaintenanceDetectInstallResponseDto>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = MaintenanceDetectInstallResponseDto.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'storage',
  };
}
mobile/openapi/lib/model/maintenance_detect_install_storage_folder_dto.dart (generated, new file, 123 lines)
@@ -0,0 +1,123 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class MaintenanceDetectInstallStorageFolderDto {
  /// Returns a new [MaintenanceDetectInstallStorageFolderDto] instance.
  MaintenanceDetectInstallStorageFolderDto({
    required this.files,
    required this.folder,
    required this.readable,
    required this.writable,
  });

  num files;

  StorageFolder folder;

  bool readable;

  bool writable;

  @override
  bool operator ==(Object other) => identical(this, other) || other is MaintenanceDetectInstallStorageFolderDto &&
    other.files == files &&
    other.folder == folder &&
    other.readable == readable &&
    other.writable == writable;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (files.hashCode) +
    (folder.hashCode) +
    (readable.hashCode) +
    (writable.hashCode);

  @override
  String toString() => 'MaintenanceDetectInstallStorageFolderDto[files=$files, folder=$folder, readable=$readable, writable=$writable]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'files'] = this.files;
    json[r'folder'] = this.folder;
    json[r'readable'] = this.readable;
    json[r'writable'] = this.writable;
    return json;
  }

  /// Returns a new [MaintenanceDetectInstallStorageFolderDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static MaintenanceDetectInstallStorageFolderDto? fromJson(dynamic value) {
    upgradeDto(value, "MaintenanceDetectInstallStorageFolderDto");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return MaintenanceDetectInstallStorageFolderDto(
        files: num.parse('${json[r'files']}'),
        folder: StorageFolder.fromJson(json[r'folder'])!,
        readable: mapValueOfType<bool>(json, r'readable')!,
        writable: mapValueOfType<bool>(json, r'writable')!,
      );
    }
    return null;
  }

  static List<MaintenanceDetectInstallStorageFolderDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <MaintenanceDetectInstallStorageFolderDto>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = MaintenanceDetectInstallStorageFolderDto.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, MaintenanceDetectInstallStorageFolderDto> mapFromJson(dynamic json) {
    final map = <String, MaintenanceDetectInstallStorageFolderDto>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = MaintenanceDetectInstallStorageFolderDto.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of MaintenanceDetectInstallStorageFolderDto-objects as value to a dart map
  static Map<String, List<MaintenanceDetectInstallStorageFolderDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<MaintenanceDetectInstallStorageFolderDto>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = MaintenanceDetectInstallStorageFolderDto.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'files',
    'folder',
    'readable',
    'writable',
  };
}
mobile/openapi/lib/model/maintenance_status_response_dto.dart (generated, new file, 158 lines)
@@ -0,0 +1,158 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class MaintenanceStatusResponseDto {
  /// Returns a new [MaintenanceStatusResponseDto] instance.
  MaintenanceStatusResponseDto({
    required this.action,
    required this.active,
    this.error,
    this.progress,
    this.task,
  });

  MaintenanceAction action;

  bool active;

  ///
  /// Please note: This property should have been non-nullable! Since the specification file
  /// does not include a default value (using the "default:" property), however, the generated
  /// source code must fall back to having a nullable type.
  /// Consider adding a "default:" property in the specification file to hide this note.
  ///
  String? error;

  ///
  /// Please note: This property should have been non-nullable! Since the specification file
  /// does not include a default value (using the "default:" property), however, the generated
  /// source code must fall back to having a nullable type.
  /// Consider adding a "default:" property in the specification file to hide this note.
  ///
  num? progress;

  ///
  /// Please note: This property should have been non-nullable! Since the specification file
  /// does not include a default value (using the "default:" property), however, the generated
  /// source code must fall back to having a nullable type.
  /// Consider adding a "default:" property in the specification file to hide this note.
  ///
  String? task;

  @override
  bool operator ==(Object other) => identical(this, other) || other is MaintenanceStatusResponseDto &&
    other.action == action &&
    other.active == active &&
    other.error == error &&
    other.progress == progress &&
    other.task == task;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (action.hashCode) +
    (active.hashCode) +
    (error == null ? 0 : error!.hashCode) +
    (progress == null ? 0 : progress!.hashCode) +
    (task == null ? 0 : task!.hashCode);

  @override
  String toString() => 'MaintenanceStatusResponseDto[action=$action, active=$active, error=$error, progress=$progress, task=$task]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'action'] = this.action;
    json[r'active'] = this.active;
    if (this.error != null) {
      json[r'error'] = this.error;
    } else {
      // json[r'error'] = null;
    }
    if (this.progress != null) {
      json[r'progress'] = this.progress;
    } else {
      // json[r'progress'] = null;
    }
    if (this.task != null) {
      json[r'task'] = this.task;
    } else {
      // json[r'task'] = null;
    }
    return json;
  }

  /// Returns a new [MaintenanceStatusResponseDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static MaintenanceStatusResponseDto? fromJson(dynamic value) {
    upgradeDto(value, "MaintenanceStatusResponseDto");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return MaintenanceStatusResponseDto(
        action: MaintenanceAction.fromJson(json[r'action'])!,
        active: mapValueOfType<bool>(json, r'active')!,
        error: mapValueOfType<String>(json, r'error'),
        progress: num.parse('${json[r'progress']}'),
        task: mapValueOfType<String>(json, r'task'),
      );
    }
    return null;
  }

  static List<MaintenanceStatusResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <MaintenanceStatusResponseDto>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = MaintenanceStatusResponseDto.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, MaintenanceStatusResponseDto> mapFromJson(dynamic json) {
    final map = <String, MaintenanceStatusResponseDto>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = MaintenanceStatusResponseDto.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of MaintenanceStatusResponseDto-objects as value to a dart map
  static Map<String, List<MaintenanceStatusResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<MaintenanceStatusResponseDto>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = MaintenanceStatusResponseDto.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'action',
    'active',
  };
}
mobile/openapi/lib/model/permission.dart (generated, 12 changes)
@@ -58,6 +58,10 @@ class Permission {
  static const authPeriodChangePassword = Permission._(r'auth.changePassword');
  static const authDevicePeriodDelete = Permission._(r'authDevice.delete');
  static const archivePeriodRead = Permission._(r'archive.read');
  static const backupPeriodList = Permission._(r'backup.list');
  static const backupPeriodDownload = Permission._(r'backup.download');
  static const backupPeriodUpload = Permission._(r'backup.upload');
  static const backupPeriodDelete = Permission._(r'backup.delete');
  static const duplicatePeriodRead = Permission._(r'duplicate.read');
  static const duplicatePeriodDelete = Permission._(r'duplicate.delete');
  static const facePeriodCreate = Permission._(r'face.create');
@@ -206,6 +210,10 @@
    authPeriodChangePassword,
    authDevicePeriodDelete,
    archivePeriodRead,
    backupPeriodList,
    backupPeriodDownload,
    backupPeriodUpload,
    backupPeriodDelete,
    duplicatePeriodRead,
    duplicatePeriodDelete,
    facePeriodCreate,
@@ -389,6 +397,10 @@ class PermissionTypeTransformer {
        case r'auth.changePassword': return Permission.authPeriodChangePassword;
        case r'authDevice.delete': return Permission.authDevicePeriodDelete;
        case r'archive.read': return Permission.archivePeriodRead;
        case r'backup.list': return Permission.backupPeriodList;
        case r'backup.download': return Permission.backupPeriodDownload;
        case r'backup.upload': return Permission.backupPeriodUpload;
        case r'backup.delete': return Permission.backupPeriodDelete;
        case r'duplicate.read': return Permission.duplicatePeriodRead;
        case r'duplicate.delete': return Permission.duplicatePeriodDelete;
        case r'face.create': return Permission.facePeriodCreate;
mobile/openapi/lib/model/set_maintenance_mode_dto.dart (generated)
@@ -14,25 +14,41 @@ class SetMaintenanceModeDto {
  /// Returns a new [SetMaintenanceModeDto] instance.
  SetMaintenanceModeDto({
    required this.action,
    this.restoreBackupFilename,
  });

  MaintenanceAction action;

  ///
  /// Please note: This property should have been non-nullable! Since the specification file
  /// does not include a default value (using the "default:" property), however, the generated
  /// source code must fall back to having a nullable type.
  /// Consider adding a "default:" property in the specification file to hide this note.
  ///
  String? restoreBackupFilename;

  @override
  bool operator ==(Object other) => identical(this, other) || other is SetMaintenanceModeDto &&
-    other.action == action;
+    other.action == action &&
+    other.restoreBackupFilename == restoreBackupFilename;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
-    (action.hashCode);
+    (action.hashCode) +
+    (restoreBackupFilename == null ? 0 : restoreBackupFilename!.hashCode);

  @override
-  String toString() => 'SetMaintenanceModeDto[action=$action]';
+  String toString() => 'SetMaintenanceModeDto[action=$action, restoreBackupFilename=$restoreBackupFilename]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'action'] = this.action;
    if (this.restoreBackupFilename != null) {
      json[r'restoreBackupFilename'] = this.restoreBackupFilename;
    } else {
      // json[r'restoreBackupFilename'] = null;
    }
    return json;
  }

@@ -46,6 +62,7 @@ class SetMaintenanceModeDto {

      return SetMaintenanceModeDto(
        action: MaintenanceAction.fromJson(json[r'action'])!,
        restoreBackupFilename: mapValueOfType<String>(json, r'restoreBackupFilename'),
      );
    }
    return null;
mobile/openapi/lib/model/storage_folder.dart (generated, new file, 97 lines)
@@ -0,0 +1,97 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;


class StorageFolder {
  /// Instantiate a new enum with the provided [value].
  const StorageFolder._(this.value);

  /// The underlying value of this enum member.
  final String value;

  @override
  String toString() => value;

  String toJson() => value;

  static const encodedVideo = StorageFolder._(r'encoded-video');
  static const library_ = StorageFolder._(r'library');
  static const upload = StorageFolder._(r'upload');
  static const profile = StorageFolder._(r'profile');
  static const thumbs = StorageFolder._(r'thumbs');
  static const backups = StorageFolder._(r'backups');

  /// List of all possible values in this [enum][StorageFolder].
  static const values = <StorageFolder>[
    encodedVideo,
    library_,
    upload,
    profile,
    thumbs,
    backups,
  ];

  static StorageFolder? fromJson(dynamic value) => StorageFolderTypeTransformer().decode(value);

  static List<StorageFolder> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <StorageFolder>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = StorageFolder.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }
}

/// Transformation class that can [encode] an instance of [StorageFolder] to String,
/// and [decode] dynamic data back to [StorageFolder].
class StorageFolderTypeTransformer {
  factory StorageFolderTypeTransformer() => _instance ??= const StorageFolderTypeTransformer._();

  const StorageFolderTypeTransformer._();

  String encode(StorageFolder data) => data.value;

  /// Decodes a [dynamic value][data] to a StorageFolder.
  ///
  /// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
  /// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
  /// cannot be decoded successfully, then an [UnimplementedError] is thrown.
  ///
  /// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
  /// and users are still using an old app with the old code.
  StorageFolder? decode(dynamic data, {bool allowNull = true}) {
    if (data != null) {
      switch (data) {
        case r'encoded-video': return StorageFolder.encodedVideo;
        case r'library': return StorageFolder.library_;
        case r'upload': return StorageFolder.upload;
        case r'profile': return StorageFolder.profile;
        case r'thumbs': return StorageFolder.thumbs;
        case r'backups': return StorageFolder.backups;
        default:
          if (!allowNull) {
            throw ArgumentError('Unknown enum value to decode: $data');
          }
      }
    }
    return null;
  }

  /// Singleton [StorageFolderTypeTransformer] instance.
  static StorageFolderTypeTransformer? _instance;
}
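Decoding sketch for the enum above: with the default allowNull, unknown wire values decode to null instead of throwing, which keeps older clients working if the server later adds folders.

final known = StorageFolder.fromJson('backups');        // StorageFolder.backups
final unknown = StorageFolder.fromJson('not-a-folder'); // null, no exception
assert(known == StorageFolder.backups && unknown == null);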
mobile/openapi/lib/model/workflow_action_item_dto.dart (generated)
@@ -14,7 +14,7 @@ class WorkflowActionItemDto {
  /// Returns a new [WorkflowActionItemDto] instance.
  WorkflowActionItemDto({
    this.actionConfig,
-    required this.actionId,
+    required this.pluginActionId,
  });

  ///
@@ -25,21 +25,21 @@ class WorkflowActionItemDto {
  ///
  Object? actionConfig;

-  String actionId;
+  String pluginActionId;

  @override
  bool operator ==(Object other) => identical(this, other) || other is WorkflowActionItemDto &&
    other.actionConfig == actionConfig &&
-    other.actionId == actionId;
+    other.pluginActionId == pluginActionId;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (actionConfig == null ? 0 : actionConfig!.hashCode) +
-    (actionId.hashCode);
+    (pluginActionId.hashCode);

  @override
-  String toString() => 'WorkflowActionItemDto[actionConfig=$actionConfig, actionId=$actionId]';
+  String toString() => 'WorkflowActionItemDto[actionConfig=$actionConfig, pluginActionId=$pluginActionId]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
@@ -48,7 +48,7 @@ class WorkflowActionItemDto {
    } else {
      // json[r'actionConfig'] = null;
    }
-    json[r'actionId'] = this.actionId;
+    json[r'pluginActionId'] = this.pluginActionId;
    return json;
  }

@@ -62,7 +62,7 @@ class WorkflowActionItemDto {

      return WorkflowActionItemDto(
        actionConfig: mapValueOfType<Object>(json, r'actionConfig'),
-        actionId: mapValueOfType<String>(json, r'actionId')!,
+        pluginActionId: mapValueOfType<String>(json, r'pluginActionId')!,
      );
    }
    return null;
@@ -110,7 +110,7 @@ class WorkflowActionItemDto {

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
-    'actionId',
+    'pluginActionId',
  };
}
mobile/openapi/lib/model/workflow_action_response_dto.dart (generated)
@@ -14,41 +14,41 @@ class WorkflowActionResponseDto {
  /// Returns a new [WorkflowActionResponseDto] instance.
  WorkflowActionResponseDto({
    required this.actionConfig,
-    required this.actionId,
    required this.id,
    required this.order,
+    required this.pluginActionId,
    required this.workflowId,
  });

  Object? actionConfig;

-  String actionId;

  String id;

  num order;

+  String pluginActionId;

  String workflowId;

  @override
  bool operator ==(Object other) => identical(this, other) || other is WorkflowActionResponseDto &&
    other.actionConfig == actionConfig &&
-    other.actionId == actionId &&
    other.id == id &&
    other.order == order &&
+    other.pluginActionId == pluginActionId &&
    other.workflowId == workflowId;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (actionConfig == null ? 0 : actionConfig!.hashCode) +
-    (actionId.hashCode) +
    (id.hashCode) +
    (order.hashCode) +
+    (pluginActionId.hashCode) +
    (workflowId.hashCode);

  @override
-  String toString() => 'WorkflowActionResponseDto[actionConfig=$actionConfig, actionId=$actionId, id=$id, order=$order, workflowId=$workflowId]';
+  String toString() => 'WorkflowActionResponseDto[actionConfig=$actionConfig, id=$id, order=$order, pluginActionId=$pluginActionId, workflowId=$workflowId]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
@@ -57,9 +57,9 @@ class WorkflowActionResponseDto {
    } else {
      // json[r'actionConfig'] = null;
    }
-    json[r'actionId'] = this.actionId;
    json[r'id'] = this.id;
    json[r'order'] = this.order;
+    json[r'pluginActionId'] = this.pluginActionId;
    json[r'workflowId'] = this.workflowId;
    return json;
  }
@@ -74,9 +74,9 @@ class WorkflowActionResponseDto {

      return WorkflowActionResponseDto(
        actionConfig: mapValueOfType<Object>(json, r'actionConfig'),
-        actionId: mapValueOfType<String>(json, r'actionId')!,
        id: mapValueOfType<String>(json, r'id')!,
        order: num.parse('${json[r'order']}'),
+        pluginActionId: mapValueOfType<String>(json, r'pluginActionId')!,
        workflowId: mapValueOfType<String>(json, r'workflowId')!,
      );
    }
@@ -126,9 +126,9 @@ class WorkflowActionResponseDto {
  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'actionConfig',
-    'actionId',
    'id',
    'order',
+    'pluginActionId',
    'workflowId',
  };
}
mobile/openapi/lib/model/workflow_filter_item_dto.dart (generated)
@@ -14,7 +14,7 @@ class WorkflowFilterItemDto {
  /// Returns a new [WorkflowFilterItemDto] instance.
  WorkflowFilterItemDto({
    this.filterConfig,
-    required this.filterId,
+    required this.pluginFilterId,
  });

  ///
@@ -25,21 +25,21 @@ class WorkflowFilterItemDto {
  ///
  Object? filterConfig;

-  String filterId;
+  String pluginFilterId;

  @override
  bool operator ==(Object other) => identical(this, other) || other is WorkflowFilterItemDto &&
    other.filterConfig == filterConfig &&
-    other.filterId == filterId;
+    other.pluginFilterId == pluginFilterId;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (filterConfig == null ? 0 : filterConfig!.hashCode) +
-    (filterId.hashCode);
+    (pluginFilterId.hashCode);

  @override
-  String toString() => 'WorkflowFilterItemDto[filterConfig=$filterConfig, filterId=$filterId]';
+  String toString() => 'WorkflowFilterItemDto[filterConfig=$filterConfig, pluginFilterId=$pluginFilterId]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
@@ -48,7 +48,7 @@ class WorkflowFilterItemDto {
    } else {
      // json[r'filterConfig'] = null;
    }
-    json[r'filterId'] = this.filterId;
+    json[r'pluginFilterId'] = this.pluginFilterId;
    return json;
  }

@@ -62,7 +62,7 @@ class WorkflowFilterItemDto {

      return WorkflowFilterItemDto(
        filterConfig: mapValueOfType<Object>(json, r'filterConfig'),
-        filterId: mapValueOfType<String>(json, r'filterId')!,
+        pluginFilterId: mapValueOfType<String>(json, r'pluginFilterId')!,
      );
    }
    return null;
@@ -110,7 +110,7 @@ class WorkflowFilterItemDto {

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
-    'filterId',
+    'pluginFilterId',
  };
}
mobile/openapi/lib/model/workflow_filter_response_dto.dart (generated)
@@ -14,41 +14,41 @@ class WorkflowFilterResponseDto {
  /// Returns a new [WorkflowFilterResponseDto] instance.
  WorkflowFilterResponseDto({
    required this.filterConfig,
-    required this.filterId,
    required this.id,
    required this.order,
+    required this.pluginFilterId,
    required this.workflowId,
  });

  Object? filterConfig;

-  String filterId;

  String id;

  num order;

+  String pluginFilterId;

  String workflowId;

  @override
  bool operator ==(Object other) => identical(this, other) || other is WorkflowFilterResponseDto &&
    other.filterConfig == filterConfig &&
-    other.filterId == filterId &&
    other.id == id &&
    other.order == order &&
+    other.pluginFilterId == pluginFilterId &&
    other.workflowId == workflowId;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (filterConfig == null ? 0 : filterConfig!.hashCode) +
-    (filterId.hashCode) +
    (id.hashCode) +
    (order.hashCode) +
+    (pluginFilterId.hashCode) +
    (workflowId.hashCode);

  @override
-  String toString() => 'WorkflowFilterResponseDto[filterConfig=$filterConfig, filterId=$filterId, id=$id, order=$order, workflowId=$workflowId]';
+  String toString() => 'WorkflowFilterResponseDto[filterConfig=$filterConfig, id=$id, order=$order, pluginFilterId=$pluginFilterId, workflowId=$workflowId]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
@@ -57,9 +57,9 @@ class WorkflowFilterResponseDto {
    } else {
      // json[r'filterConfig'] = null;
    }
-    json[r'filterId'] = this.filterId;
    json[r'id'] = this.id;
    json[r'order'] = this.order;
+    json[r'pluginFilterId'] = this.pluginFilterId;
    json[r'workflowId'] = this.workflowId;
    return json;
  }
@@ -74,9 +74,9 @@ class WorkflowFilterResponseDto {

      return WorkflowFilterResponseDto(
        filterConfig: mapValueOfType<Object>(json, r'filterConfig'),
-        filterId: mapValueOfType<String>(json, r'filterId')!,
        id: mapValueOfType<String>(json, r'id')!,
        order: num.parse('${json[r'order']}'),
+        pluginFilterId: mapValueOfType<String>(json, r'pluginFilterId')!,
        workflowId: mapValueOfType<String>(json, r'workflowId')!,
      );
    }
@@ -126,9 +126,9 @@ class WorkflowFilterResponseDto {
  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'filterConfig',
-    'filterId',
    'id',
    'order',
+    'pluginFilterId',
    'workflowId',
  };
}
mobile/test/utils/timezone_test.dart (new file, 278 lines)
@@ -0,0 +1,278 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:immich_mobile/utils/timezone.dart';
import 'package:timezone/data/latest.dart' as tz;

void main() {
  setUpAll(() {
    tz.initializeTimeZones();
  });

  group('applyTimezoneOffset', () {
    group('with named timezone locations', () {
      test('should convert UTC to Asia/Hong_Kong (+08:00)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Asia/Hong_Kong',
        );

        expect(adjustedTime.hour, 20); // 12:00 UTC + 8 hours = 20:00
        expect(offset, const Duration(hours: 8));
      });

      test('should convert UTC to America/New_York (handles DST)', () {
        // Summer time (EDT = UTC-4)
        final summerUtc = DateTime.utc(2024, 6, 15, 12, 0, 0);
        final (summerTime, summerOffset) = applyTimezoneOffset(
          dateTime: summerUtc,
          timeZone: 'America/New_York',
        );

        expect(summerTime.hour, 8); // 12:00 UTC - 4 hours = 08:00
        expect(summerOffset, const Duration(hours: -4));

        // Winter time (EST = UTC-5)
        final winterUtc = DateTime.utc(2024, 1, 15, 12, 0, 0);
        final (winterTime, winterOffset) = applyTimezoneOffset(
          dateTime: winterUtc,
          timeZone: 'America/New_York',
        );

        expect(winterTime.hour, 7); // 12:00 UTC - 5 hours = 07:00
        expect(winterOffset, const Duration(hours: -5));
      });

      test('should convert UTC to Europe/London', () {
        // Winter (GMT = UTC+0)
        final winterUtc = DateTime.utc(2024, 1, 15, 12, 0, 0);
        final (winterTime, winterOffset) = applyTimezoneOffset(
          dateTime: winterUtc,
          timeZone: 'Europe/London',
        );

        expect(winterTime.hour, 12);
        expect(winterOffset, Duration.zero);

        // Summer (BST = UTC+1)
        final summerUtc = DateTime.utc(2024, 6, 15, 12, 0, 0);
        final (summerTime, summerOffset) = applyTimezoneOffset(
          dateTime: summerUtc,
          timeZone: 'Europe/London',
        );

        expect(summerTime.hour, 13);
        expect(summerOffset, const Duration(hours: 1));
      });

      test('should handle timezone with 30-minute offset (Asia/Kolkata)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Asia/Kolkata',
        );

        expect(adjustedTime.hour, 17);
        expect(adjustedTime.minute, 30); // 12:00 UTC + 5:30 = 17:30
        expect(offset, const Duration(hours: 5, minutes: 30));
      });

      test('should handle timezone with 45-minute offset (Asia/Kathmandu)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Asia/Kathmandu',
        );

        expect(adjustedTime.hour, 17);
        expect(adjustedTime.minute, 45); // 12:00 UTC + 5:45 = 17:45
        expect(offset, const Duration(hours: 5, minutes: 45));
      });
    });

    group('with UTC offset format', () {
      test('should handle UTC+08:00 format', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC+08:00',
        );

        expect(adjustedTime.hour, 20);
        expect(offset, const Duration(hours: 8));
      });

      test('should handle UTC-05:00 format', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC-05:00',
        );

        expect(adjustedTime.hour, 7);
        expect(offset, const Duration(hours: -5));
      });

      test('should handle UTC+8 format (without minutes)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC+8',
        );

        expect(adjustedTime.hour, 20);
        expect(offset, const Duration(hours: 8));
      });

      test('should handle UTC-5 format (without minutes)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC-5',
        );

        expect(adjustedTime.hour, 7);
        expect(offset, const Duration(hours: -5));
      });

      test('should handle plain UTC format', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC',
        );

        expect(adjustedTime.hour, 12);
        expect(offset, Duration.zero);
      });

      test('should handle lowercase utc format', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'utc+08:00',
        );

        expect(adjustedTime.hour, 20);
        expect(offset, const Duration(hours: 8));
      });

      test('should handle UTC+05:30 format (with minutes)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC+05:30',
        );

        expect(adjustedTime.hour, 17);
        expect(adjustedTime.minute, 30);
        expect(offset, const Duration(hours: 5, minutes: 30));
      });
    });

    group('with null or invalid timezone', () {
      test('should return UTC time when timezone is null', () {
        final localTime = DateTime(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: localTime,
          timeZone: null,
        );

        expect(adjustedTime.isUtc, true);
        expect(offset, adjustedTime.timeZoneOffset);
      });

      test('should return UTC time when timezone is invalid', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Invalid/Timezone',
        );

        expect(adjustedTime.isUtc, true);
        expect(adjustedTime.hour, 12);
        expect(offset, adjustedTime.timeZoneOffset);
      });

      test('should return UTC time when UTC offset format is malformed', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC++08',
        );

        expect(adjustedTime.isUtc, true);
        expect(adjustedTime.hour, 12);
      });
    });

    group('edge cases', () {
      test('should handle date crossing midnight forward', () {
        final utcTime = DateTime.utc(2024, 6, 15, 20, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Asia/Tokyo', // UTC+9
        );

        expect(adjustedTime.day, 16); // Crosses to next day
        expect(adjustedTime.hour, 5); // 20:00 UTC + 9 = 05:00 next day
        expect(offset, const Duration(hours: 9));
      });

      test('should handle date crossing midnight backward', () {
        final utcTime = DateTime.utc(2024, 6, 15, 3, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'America/Los_Angeles', // UTC-7 in summer
        );

        expect(adjustedTime.day, 14); // Crosses to previous day
        expect(adjustedTime.hour, 20); // 03:00 UTC - 7 = 20:00 previous day
        expect(offset, const Duration(hours: -7));
      });

      test('should handle year boundary crossing', () {
        final utcTime = DateTime.utc(2024, 1, 1, 2, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'America/New_York', // UTC-5 in winter
        );

        expect(adjustedTime.year, 2023);
        expect(adjustedTime.month, 12);
        expect(adjustedTime.day, 31);
        expect(adjustedTime.hour, 21); // 02:00 UTC - 5 = 21:00 Dec 31
      });

      test('should convert local time to UTC before applying timezone', () {
        // Create a local time (not UTC)
        final localTime = DateTime(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, _) = applyTimezoneOffset(
          dateTime: localTime,
          timeZone: 'Asia/Hong_Kong',
        );

        // The function converts to UTC first, then applies timezone
        // So local 12:00 -> UTC (depends on local timezone) -> HK time
        // We can verify it's working by checking it's a TZDateTime
        expect(adjustedTime, isNotNull);
      });
    });
  });
}
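Standalone usage sketch of the helper exercised by these tests; the record-returning signature (named dateTime/timeZone parameters yielding an adjusted time and an offset) is taken from the calls above.

import 'package:immich_mobile/utils/timezone.dart';
import 'package:timezone/data/latest.dart' as tz;

void main() {
  tz.initializeTimeZones(); // must run before named locations resolve

  final (local, offset) = applyTimezoneOffset(
    dateTime: DateTime.utc(2024, 6, 15, 12),
    timeZone: 'Asia/Kolkata',
  );
  print('$local (offset $offset)'); // 17:30 local, offset +5:30
}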
@@ -322,6 +322,237 @@
"x-immich-state": "Stable"
}
},
"/admin/database-backups": {
"delete": {
"description": "Delete a backup by its filename",
"operationId": "deleteDatabaseBackup",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/DatabaseBackupDeleteDto"
}
}
},
"required": true
},
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Delete database backup",
"tags": [
"Database Backups (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-permission": "backup.delete",
"x-immich-state": "Alpha"
},
"get": {
"description": "Get the list of the successful and failed backups",
"operationId": "listDatabaseBackups",
"parameters": [],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/DatabaseBackupListResponseDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "List database backups",
"tags": [
"Database Backups (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-permission": "maintenance",
"x-immich-state": "Alpha"
}
},
"/admin/database-backups/start-restore": {
"post": {
"description": "Put Immich into maintenance mode to restore a backup (Immich must not be configured)",
"operationId": "startDatabaseRestoreFlow",
"parameters": [],
"responses": {
"201": {
"description": ""
}
},
"summary": "Start database backup restore flow",
"tags": [
"Database Backups (admin)"
],
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-state": "Alpha"
}
},
"/admin/database-backups/upload": {
"post": {
"description": "Uploads .sql/.sql.gz file to restore backup from",
"operationId": "uploadDatabaseBackup",
"parameters": [],
"requestBody": {
"content": {
"multipart/form-data": {
"schema": {
"$ref": "#/components/schemas/DatabaseBackupUploadDto"
}
}
},
"description": "Backup Upload",
"required": true
},
"responses": {
"201": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Upload database backup",
"tags": [
"Database Backups (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-permission": "backup.upload",
"x-immich-state": "Alpha"
}
},
"/admin/database-backups/{filename}": {
"get": {
"description": "Downloads the database backup file",
"operationId": "downloadDatabaseBackup",
"parameters": [
{
"name": "filename",
"required": true,
"in": "path",
"schema": {
"format": "string",
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/octet-stream": {
"schema": {
"format": "binary",
"type": "string"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Download database backup",
"tags": [
"Database Backups (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-permission": "backup.download",
"x-immich-state": "Alpha"
}
},
"/admin/maintenance": {
"post": {
"description": "Put Immich into or take it out of maintenance mode",
@@ -372,6 +603,53 @@
"x-immich-state": "Alpha"
}
},
"/admin/maintenance/detect-install": {
"get": {
"description": "Collect integrity checks and other heuristics about local data.",
"operationId": "detectPriorInstall",
"parameters": [],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/MaintenanceDetectInstallResponseDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Detect existing install",
"tags": [
"Maintenance (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-permission": "maintenance",
"x-immich-state": "Alpha"
}
},
"/admin/maintenance/login": {
"post": {
"description": "Login with maintenance token or cookie to receive current information and perform further actions.",
@@ -416,6 +694,40 @@
"x-immich-state": "Alpha"
}
},
"/admin/maintenance/status": {
"get": {
"description": "Fetch information about the currently running maintenance action.",
"operationId": "getMaintenanceStatus",
"parameters": [],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/MaintenanceStatusResponseDto"
}
}
},
"description": ""
}
},
"summary": "Get maintenance mode status",
"tags": [
"Maintenance (admin)"
],
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-state": "Alpha"
}
},
"/admin/notifications": {
"post": {
"description": "Create a new notification for a specific user.",
@@ -14296,6 +14608,10 @@
"name": "Authentication (admin)",
"description": "Administrative endpoints related to authentication."
},
{
"name": "Database Backups (admin)",
"description": "Manage backups of the Immich database."
},
{
"name": "Deprecated",
"description": "Deprecated endpoints that are planned for removal in the next major release."
@@ -16233,6 +16549,43 @@
],
"type": "object"
},
"DatabaseBackupDeleteDto": {
"properties": {
"backups": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"backups"
],
"type": "object"
},
"DatabaseBackupListResponseDto": {
"properties": {
"backups": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"backups"
],
"type": "object"
},
"DatabaseBackupUploadDto": {
"properties": {
"file": {
"format": "binary",
"type": "string"
}
},
"type": "object"
},
"DownloadArchiveInfo": {
"properties": {
"assetIds": {
@@ -16899,7 +17252,8 @@
"MaintenanceAction": {
"enum": [
"start",
"end"
"end",
"restore_database"
],
"type": "string"
},
@@ -16914,6 +17268,47 @@
],
"type": "object"
},
"MaintenanceDetectInstallResponseDto": {
"properties": {
"storage": {
"items": {
"$ref": "#/components/schemas/MaintenanceDetectInstallStorageFolderDto"
},
"type": "array"
}
},
"required": [
"storage"
],
"type": "object"
},
"MaintenanceDetectInstallStorageFolderDto": {
"properties": {
"files": {
"type": "number"
},
"folder": {
"allOf": [
{
"$ref": "#/components/schemas/StorageFolder"
}
]
},
"readable": {
"type": "boolean"
},
"writable": {
"type": "boolean"
}
},
"required": [
"files",
"folder",
"readable",
"writable"
],
"type": "object"
},
"MaintenanceLoginDto": {
"properties": {
"token": {
@@ -16922,6 +17317,34 @@
},
"type": "object"
},
"MaintenanceStatusResponseDto": {
"properties": {
"action": {
"allOf": [
{
"$ref": "#/components/schemas/MaintenanceAction"
}
]
},
"active": {
"type": "boolean"
},
"error": {
"type": "string"
},
"progress": {
"type": "number"
},
"task": {
"type": "string"
}
},
"required": [
"action",
"active"
],
"type": "object"
},
"ManualJobName": {
"enum": [
"person-cleanup",
@@ -17862,6 +18285,10 @@
"auth.changePassword",
"authDevice.delete",
"archive.read",
"backup.list",
"backup.download",
"backup.upload",
"backup.delete",
"duplicate.read",
"duplicate.delete",
"face.create",
@@ -19600,6 +20027,9 @@
"$ref": "#/components/schemas/MaintenanceAction"
}
]
},
"restoreBackupFilename": {
"type": "string"
}
},
"required": [
@@ -20172,6 +20602,17 @@
},
"type": "object"
},
"StorageFolder": {
"enum": [
"encoded-video",
"library",
"upload",
"profile",
"thumbs",
"backups"
],
"type": "string"
},
"SyncAckDeleteDto": {
"properties": {
"types": {
@@ -23162,13 +23603,13 @@
"actionConfig": {
"type": "object"
},
"actionId": {
"pluginActionId": {
"format": "uuid",
"type": "string"
}
},
"required": [
"actionId"
"pluginActionId"
],
"type": "object"
},
@@ -23178,24 +23619,24 @@
"nullable": true,
"type": "object"
},
"actionId": {
"type": "string"
},
"id": {
"type": "string"
},
"order": {
"type": "number"
},
"pluginActionId": {
"type": "string"
},
"workflowId": {
"type": "string"
}
},
"required": [
"actionConfig",
"actionId",
"id",
"order",
"pluginActionId",
"workflowId"
],
"type": "object"
@@ -23244,13 +23685,13 @@
"filterConfig": {
"type": "object"
},
"filterId": {
"pluginFilterId": {
"format": "uuid",
"type": "string"
}
},
"required": [
"filterId"
"pluginFilterId"
],
"type": "object"
},
@@ -23260,24 +23701,24 @@
"nullable": true,
"type": "object"
},
"filterId": {
"type": "string"
},
"id": {
"type": "string"
},
"order": {
"type": "number"
},
"pluginFilterId": {
"type": "string"
},
"workflowId": {
"type": "string"
}
},
"required": [
"filterConfig",
"filterId",
"id",
"order",
"pluginFilterId",
"workflowId"
],
"type": "object"
|
||||
comments: number;
|
||||
likes: number;
|
||||
};
|
||||
export type DatabaseBackupDeleteDto = {
|
||||
backups: string[];
|
||||
};
|
||||
export type DatabaseBackupListResponseDto = {
|
||||
backups: string[];
|
||||
};
|
||||
export type DatabaseBackupUploadDto = {
|
||||
file?: Blob;
|
||||
};
|
||||
export type SetMaintenanceModeDto = {
|
||||
action: MaintenanceAction;
|
||||
restoreBackupFilename?: string;
|
||||
};
|
||||
export type MaintenanceDetectInstallStorageFolderDto = {
|
||||
files: number;
|
||||
folder: StorageFolder;
|
||||
readable: boolean;
|
||||
writable: boolean;
|
||||
};
|
||||
export type MaintenanceDetectInstallResponseDto = {
|
||||
storage: MaintenanceDetectInstallStorageFolderDto[];
|
||||
};
|
||||
export type MaintenanceLoginDto = {
|
||||
token?: string;
|
||||
@@ -49,6 +68,13 @@ export type MaintenanceLoginDto = {
|
||||
export type MaintenanceAuthDto = {
|
||||
username: string;
|
||||
};
|
||||
export type MaintenanceStatusResponseDto = {
|
||||
action: MaintenanceAction;
|
||||
active: boolean;
|
||||
error?: string;
|
||||
progress?: number;
|
||||
task?: string;
|
||||
};
|
||||
export type NotificationCreateDto = {
|
||||
data?: object;
|
||||
description?: string | null;
|
||||
@@ -1729,16 +1755,16 @@ export type CreateProfileImageResponseDto = {
|
||||
};
|
||||
export type WorkflowActionResponseDto = {
|
||||
actionConfig: object | null;
|
||||
actionId: string;
|
||||
id: string;
|
||||
order: number;
|
||||
pluginActionId: string;
|
||||
workflowId: string;
|
||||
};
|
||||
export type WorkflowFilterResponseDto = {
|
||||
filterConfig: object | null;
|
||||
filterId: string;
|
||||
id: string;
|
||||
order: number;
|
||||
pluginFilterId: string;
|
||||
workflowId: string;
|
||||
};
|
||||
export type WorkflowResponseDto = {
|
||||
@@ -1754,11 +1780,11 @@ export type WorkflowResponseDto = {
|
||||
};
|
||||
export type WorkflowActionItemDto = {
|
||||
actionConfig?: object;
|
||||
actionId: string;
|
||||
pluginActionId: string;
|
||||
};
|
||||
export type WorkflowFilterItemDto = {
|
||||
filterConfig?: object;
|
||||
filterId: string;
|
||||
pluginFilterId: string;
|
||||
};
|
||||
export type WorkflowCreateDto = {
|
||||
actions: WorkflowActionItemDto[];
|
||||
@@ -1850,6 +1876,63 @@ export function unlinkAllOAuthAccountsAdmin(opts?: Oazapfts.RequestOpts) {
|
||||
method: "POST"
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Delete database backup
|
||||
*/
|
||||
export function deleteDatabaseBackup({ databaseBackupDeleteDto }: {
|
||||
databaseBackupDeleteDto: DatabaseBackupDeleteDto;
|
||||
}, opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchText("/admin/database-backups", oazapfts.json({
|
||||
...opts,
|
||||
method: "DELETE",
|
||||
body: databaseBackupDeleteDto
|
||||
})));
|
||||
}
|
||||
/**
|
||||
* List database backups
|
||||
*/
|
||||
export function listDatabaseBackups(opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchJson<{
|
||||
status: 200;
|
||||
data: DatabaseBackupListResponseDto;
|
||||
}>("/admin/database-backups", {
|
||||
...opts
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Start database backup restore flow
|
||||
*/
|
||||
export function startDatabaseRestoreFlow(opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchText("/admin/database-backups/start-restore", {
|
||||
...opts,
|
||||
method: "POST"
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Upload database backup
|
||||
*/
|
||||
export function uploadDatabaseBackup({ databaseBackupUploadDto }: {
|
||||
databaseBackupUploadDto: DatabaseBackupUploadDto;
|
||||
}, opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchText("/admin/database-backups/upload", oazapfts.multipart({
|
||||
...opts,
|
||||
method: "POST",
|
||||
body: databaseBackupUploadDto
|
||||
})));
|
||||
}
|
||||
/**
|
||||
* Download database backup
|
||||
*/
|
||||
export function downloadDatabaseBackup({ filename }: {
|
||||
filename: string;
|
||||
}, opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchBlob<{
|
||||
status: 200;
|
||||
data: Blob;
|
||||
}>(`/admin/database-backups/${encodeURIComponent(filename)}`, {
|
||||
...opts
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Set maintenance mode
|
||||
*/
|
||||
@@ -1862,6 +1945,17 @@ export function setMaintenanceMode({ setMaintenanceModeDto }: {
|
||||
body: setMaintenanceModeDto
|
||||
})));
|
||||
}
|
||||
/**
|
||||
* Detect existing install
|
||||
*/
|
||||
export function detectPriorInstall(opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchJson<{
|
||||
status: 200;
|
||||
data: MaintenanceDetectInstallResponseDto;
|
||||
}>("/admin/maintenance/detect-install", {
|
||||
...opts
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Log into maintenance mode
|
||||
*/
|
||||
@@ -1877,6 +1971,17 @@ export function maintenanceLogin({ maintenanceLoginDto }: {
|
||||
body: maintenanceLoginDto
|
||||
})));
|
||||
}
|
||||
/**
|
||||
* Get maintenance mode status
|
||||
*/
|
||||
export function getMaintenanceStatus(opts?: Oazapfts.RequestOpts) {
|
||||
return oazapfts.ok(oazapfts.fetchJson<{
|
||||
status: 200;
|
||||
data: MaintenanceStatusResponseDto;
|
||||
}>("/admin/maintenance/status", {
|
||||
...opts
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Create a notification
|
||||
*/
|
||||
@@ -5140,7 +5245,16 @@ export enum UserAvatarColor {
|
||||
}
|
||||
export enum MaintenanceAction {
|
||||
Start = "start",
|
||||
End = "end"
|
||||
End = "end",
|
||||
RestoreDatabase = "restore_database"
|
||||
}
|
||||
export enum StorageFolder {
|
||||
EncodedVideo = "encoded-video",
|
||||
Library = "library",
|
||||
Upload = "upload",
|
||||
Profile = "profile",
|
||||
Thumbs = "thumbs",
|
||||
Backups = "backups"
|
||||
}
|
||||
export enum NotificationLevel {
|
||||
Success = "success",
|
||||
@@ -5234,6 +5348,10 @@ export enum Permission {
|
||||
AuthChangePassword = "auth.changePassword",
|
||||
AuthDeviceDelete = "authDevice.delete",
|
||||
ArchiveRead = "archive.read",
|
||||
BackupList = "backup.list",
|
||||
BackupDownload = "backup.download",
|
||||
BackupUpload = "backup.upload",
|
||||
BackupDelete = "backup.delete",
|
||||
DuplicateRead = "duplicate.read",
|
||||
DuplicateDelete = "duplicate.delete",
|
||||
FaceCreate = "face.create",
|
||||
|
||||
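A hedged usage sketch for the generated functions above, assuming they are re-exported from the published @immich/sdk package and the API key belongs to an admin holding the new backup permissions (URL and key are placeholders):

import { defaults, listDatabaseBackups, downloadDatabaseBackup } from '@immich/sdk';

defaults.baseUrl = 'https://immich.example.com/api';
defaults.headers = { 'x-api-key': process.env.IMMICH_API_KEY ?? '' };

// List backups, then download the first one as a Blob.
const { backups } = await listDatabaseBackups();
if (backups.length > 0) {
  const blob = await downloadDatabaseBackup({ filename: backups[0] });
  console.log(`fetched ${backups[0]}: ${blob.size} bytes`);
}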
@@ -3,7 +3,7 @@
"version": "0.0.1",
"description": "Monorepo for Immich",
"private": true,
"packageManager": "pnpm@10.22.0+sha512.bf049efe995b28f527fd2b41ae0474ce29186f7edcb3bf545087bd61fbbebb2bf75362d1307fda09c2d288e1e499787ac12d4fcb617a974718a6051f2eee741c",
"packageManager": "pnpm@10.24.0+sha512.01ff8ae71b4419903b65c60fb2dc9d34cf8bb6e06d03bde112ef38f7a34d6904c424ba66bea5cdcf12890230bf39f9580473140ed9c946fef328b6e5238a345a",
"engines": {
"pnpm": ">=10.0.0"
}
pnpm-lock.yaml
generated
1472
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -45,14 +45,14 @@
|
||||
"@nestjs/websockets": "^11.0.4",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/context-async-hooks": "^2.0.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.207.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.207.0",
|
||||
"@opentelemetry/instrumentation-ioredis": "^0.55.0",
|
||||
"@opentelemetry/instrumentation-nestjs-core": "^0.54.0",
|
||||
"@opentelemetry/instrumentation-pg": "^0.60.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.208.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.208.0",
|
||||
"@opentelemetry/instrumentation-ioredis": "^0.56.0",
|
||||
"@opentelemetry/instrumentation-nestjs-core": "^0.55.0",
|
||||
"@opentelemetry/instrumentation-pg": "^0.61.0",
|
||||
"@opentelemetry/resources": "^2.0.1",
|
||||
"@opentelemetry/sdk-metrics": "^2.0.1",
|
||||
"@opentelemetry/sdk-node": "^0.207.0",
|
||||
"@opentelemetry/sdk-node": "^0.208.0",
|
||||
"@opentelemetry/semantic-conventions": "^1.34.0",
|
||||
"@react-email/components": "^0.5.0",
|
||||
"@react-email/render": "^1.1.2",
|
||||
|
||||
@@ -21,8 +21,11 @@ import { LoggingInterceptor } from 'src/middleware/logging.interceptor';
|
||||
import { repositories } from 'src/repositories';
|
||||
import { AppRepository } from 'src/repositories/app.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { teardownTelemetry, TelemetryRepository } from 'src/repositories/telemetry.repository';
|
||||
import { WebsocketRepository } from 'src/repositories/websocket.repository';
|
||||
@@ -103,6 +106,9 @@ export class ApiModule extends BaseModule {}
|
||||
providers: [
|
||||
ConfigRepository,
|
||||
LoggingRepository,
|
||||
StorageRepository,
|
||||
ProcessRepository,
|
||||
DatabaseRepository,
|
||||
SystemMetadataRepository,
|
||||
AppRepository,
|
||||
MaintenanceWebsocketRepository,
|
||||
@@ -116,9 +122,14 @@ export class MaintenanceModule {
|
||||
constructor(
|
||||
@Inject(IWorker) private worker: ImmichWorker,
|
||||
logger: LoggingRepository,
|
||||
private maintenanceWorkerService: MaintenanceWorkerService,
|
||||
) {
|
||||
logger.setAppName(this.worker);
|
||||
}
|
||||
|
||||
async onModuleInit() {
|
||||
await this.maintenanceWorkerService.init();
|
||||
}
|
||||
}
|
||||
|
||||
@Module({
|
||||
|
||||
@@ -141,6 +141,7 @@ export const endpointTags: Record<ApiTag, string> = {
|
||||
[ApiTag.Assets]: 'An asset is an image or video that has been uploaded to Immich.',
|
||||
[ApiTag.Authentication]: 'Endpoints related to user authentication, including OAuth.',
|
||||
[ApiTag.AuthenticationAdmin]: 'Administrative endpoints related to authentication.',
|
||||
[ApiTag.DatabaseBackups]: 'Manage backups of the Immich database.',
|
||||
[ApiTag.Deprecated]: 'Deprecated endpoints that are planned for removal in the next major release.',
|
||||
[ApiTag.Download]: 'Endpoints for downloading assets or collections of assets.',
|
||||
[ApiTag.Duplicates]: 'Endpoints for managing and identifying duplicate assets.',
|
||||
|
||||
101
server/src/controllers/database-backup.controller.ts
Normal file
101
server/src/controllers/database-backup.controller.ts
Normal file
@@ -0,0 +1,101 @@
|
||||
import { Body, Controller, Delete, Get, Next, Param, Post, Res, UploadedFile, UseInterceptors } from '@nestjs/common';
|
||||
import { FileInterceptor } from '@nestjs/platform-express';
|
||||
import { ApiBody, ApiConsumes, ApiTags } from '@nestjs/swagger';
|
||||
import { NextFunction, Response } from 'express';
|
||||
import { Endpoint, HistoryBuilder } from 'src/decorators';
|
||||
import {
|
||||
DatabaseBackupDeleteDto,
|
||||
DatabaseBackupListResponseDto,
|
||||
DatabaseBackupUploadDto,
|
||||
} from 'src/dtos/database-backup.dto';
|
||||
import { ApiTag, ImmichCookie, Permission } from 'src/enum';
|
||||
import { Authenticated, FileResponse, GetLoginDetails } from 'src/middleware/auth.guard';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { LoginDetails } from 'src/services/auth.service';
|
||||
import { DatabaseBackupService } from 'src/services/database-backup.service';
|
||||
import { MaintenanceService } from 'src/services/maintenance.service';
|
||||
import { sendFile } from 'src/utils/file';
|
||||
import { respondWithCookie } from 'src/utils/response';
|
||||
import { FilenameParamDto } from 'src/validation';
|
||||
|
||||
@ApiTags(ApiTag.DatabaseBackups)
|
||||
@Controller('admin/database-backups')
|
||||
export class DatabaseBackupController {
|
||||
constructor(
|
||||
private logger: LoggingRepository,
|
||||
private service: DatabaseBackupService,
|
||||
private maintenanceService: MaintenanceService,
|
||||
) {}
|
||||
|
||||
@Get()
|
||||
@Endpoint({
|
||||
summary: 'List database backups',
|
||||
description: 'Get the list of the successful and failed backups',
|
||||
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
|
||||
})
|
||||
@Authenticated({ permission: Permission.Maintenance, admin: true })
|
||||
listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
|
||||
return this.service.listBackups();
|
||||
}
|
||||
|
||||
@Get(':filename')
|
||||
@FileResponse()
|
||||
@Endpoint({
|
||||
summary: 'Download database backup',
|
||||
description: 'Downloads the database backup file',
|
||||
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
|
||||
})
|
||||
@Authenticated({ permission: Permission.BackupDownload, admin: true })
|
||||
async downloadDatabaseBackup(
|
||||
@Param() { filename }: FilenameParamDto,
|
||||
@Res() res: Response,
|
||||
@Next() next: NextFunction,
|
||||
): Promise<void> {
|
||||
await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
|
||||
}
|
||||
|
||||
@Delete()
|
||||
@Endpoint({
|
||||
summary: 'Delete database backup',
|
||||
description: 'Delete a backup by its filename',
|
||||
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
|
||||
})
|
||||
@Authenticated({ permission: Permission.BackupDelete, admin: true })
|
||||
async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
|
||||
return this.service.deleteBackup(dto.backups);
|
||||
}
|
||||
|
||||
@Post('start-restore')
|
||||
@Endpoint({
|
||||
summary: 'Start database backup restore flow',
|
||||
description: 'Put Immich into maintenance mode to restore a backup (Immich must not be configured)',
|
||||
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
|
||||
})
|
||||
async startDatabaseRestoreFlow(
|
||||
@GetLoginDetails() loginDetails: LoginDetails,
|
||||
@Res({ passthrough: true }) res: Response,
|
||||
): Promise<void> {
|
||||
const { jwt } = await this.maintenanceService.startRestoreFlow();
|
||||
return respondWithCookie(res, undefined, {
|
||||
isSecure: loginDetails.isSecure,
|
||||
values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],
|
||||
});
|
||||
}
|
||||
|
||||
@Post('upload')
|
||||
@Authenticated({ permission: Permission.BackupUpload, admin: true })
|
||||
@ApiConsumes('multipart/form-data')
|
||||
@ApiBody({ description: 'Backup Upload', type: DatabaseBackupUploadDto })
|
||||
@Endpoint({
|
||||
summary: 'Upload database backup',
|
||||
description: 'Uploads .sql/.sql.gz file to restore backup from',
|
||||
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
|
||||
})
|
||||
@UseInterceptors(FileInterceptor('file'))
|
||||
uploadDatabaseBackup(
|
||||
@UploadedFile()
|
||||
file: Express.Multer.File,
|
||||
): Promise<void> {
|
||||
return this.service.uploadBackup(file);
|
||||
}
|
||||
}
|
||||
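The upload route above reads a single multipart field named "file" (see FileInterceptor('file')). A minimal sketch of exercising it directly with fetch from Node 18+, assuming an admin API key; the filenames and URL are placeholders:

import { readFile } from 'node:fs/promises';

const dump = await readFile('immich-db-backup.sql.gz'); // hypothetical local dump
const form = new FormData();
form.append('file', new Blob([dump]), 'immich-db-backup.sql.gz');

await fetch('https://immich.example.com/api/admin/database-backups/upload', {
  method: 'POST',
  headers: { 'x-api-key': process.env.IMMICH_API_KEY ?? '' },
  body: form,
});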
@@ -6,6 +6,7 @@ import { AssetMediaController } from 'src/controllers/asset-media.controller';
import { AssetController } from 'src/controllers/asset.controller';
import { AuthAdminController } from 'src/controllers/auth-admin.controller';
import { AuthController } from 'src/controllers/auth.controller';
import { DatabaseBackupController } from 'src/controllers/database-backup.controller';
import { DownloadController } from 'src/controllers/download.controller';
import { DuplicateController } from 'src/controllers/duplicate.controller';
import { FaceController } from 'src/controllers/face.controller';
@@ -46,6 +47,7 @@ export const controllers = [
AssetMediaController,
AuthController,
AuthAdminController,
DatabaseBackupController,
DownloadController,
DuplicateController,
FaceController,

@@ -1,9 +1,15 @@
import { BadRequestException, Body, Controller, Post, Res } from '@nestjs/common';
import { BadRequestException, Body, Controller, Get, Post, Res } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { Response } from 'express';
import { Endpoint, HistoryBuilder } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
import {
MaintenanceAuthDto,
MaintenanceDetectInstallResponseDto,
MaintenanceLoginDto,
MaintenanceStatusResponseDto,
SetMaintenanceModeDto,
} from 'src/dtos/maintenance.dto';
import { ApiTag, ImmichCookie, MaintenanceAction, Permission } from 'src/enum';
import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
import { LoginDetails } from 'src/services/auth.service';
@@ -15,6 +21,27 @@ import { respondWithCookie } from 'src/utils/response';
export class MaintenanceController {
constructor(private service: MaintenanceService) {}

@Get('status')
@Endpoint({
summary: 'Get maintenance mode status',
description: 'Fetch information about the currently running maintenance action.',
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
})
getMaintenanceStatus(): MaintenanceStatusResponseDto {
return this.service.getMaintenanceStatus();
}

@Get('detect-install')
@Endpoint({
summary: 'Detect existing install',
description: 'Collect integrity checks and other heuristics about local data.',
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
})
@Authenticated({ permission: Permission.Maintenance, admin: true })
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
return this.service.detectPriorInstall();
}

@Post('login')
@Endpoint({
summary: 'Log into maintenance mode',
@@ -38,8 +65,8 @@ export class MaintenanceController {
@GetLoginDetails() loginDetails: LoginDetails,
@Res({ passthrough: true }) res: Response,
): Promise<void> {
if (dto.action === MaintenanceAction.Start) {
const { jwt } = await this.service.startMaintenance(auth.user.name);
if (dto.action !== MaintenanceAction.End) {
const { jwt } = await this.service.startMaintenance(dto, auth.user.name);
return respondWithCookie(res, undefined, {
isSecure: loginDetails.isSecure,
values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],
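With the controller change above, any action other than "end" now issues a maintenance token, and the DTO gains restoreBackupFilename. A hedged sketch of kicking off a restore through the SDK functions from the earlier hunks; the filename is a placeholder that would come from listDatabaseBackups:

import { setMaintenanceMode, MaintenanceAction } from '@immich/sdk';

await setMaintenanceMode({
  setMaintenanceModeDto: {
    action: MaintenanceAction.RestoreDatabase,
    restoreBackupFilename: 'immich-db-backup-example.sql.gz',
  },
});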
@@ -305,7 +305,7 @@ export class StorageCore {
|
||||
return this.assetRepository.update({ id, encodedVideoPath: newPath });
|
||||
}
|
||||
case AssetPathType.Sidecar: {
|
||||
return this.assetRepository.update({ id, sidecarPath: newPath });
|
||||
return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: newPath });
|
||||
}
|
||||
case PersonPathType.Face: {
|
||||
return this.personRepository.update({ id, thumbnailPath: newPath });
|
||||
|
||||
@@ -122,7 +122,6 @@ export type Asset = {
|
||||
originalFileName: string;
|
||||
originalPath: string;
|
||||
ownerId: string;
|
||||
sidecarPath: string | null;
|
||||
type: AssetType;
|
||||
};
|
||||
|
||||
@@ -156,13 +155,6 @@ export type StorageAsset = {
|
||||
encodedVideoPath: string | null;
|
||||
};
|
||||
|
||||
export type SidecarWriteAsset = {
|
||||
id: string;
|
||||
sidecarPath: string | null;
|
||||
originalPath: string;
|
||||
tags: Array<{ value: string }>;
|
||||
};
|
||||
|
||||
export type Stack = {
|
||||
id: string;
|
||||
primaryAssetId: string;
|
||||
@@ -309,14 +301,14 @@ export type Workflow = Selectable<WorkflowTable> & {
|
||||
|
||||
export type WorkflowFilter = Selectable<WorkflowFilterTable> & {
|
||||
workflowId: string;
|
||||
filterId: string;
|
||||
pluginFilterId: string;
|
||||
filterConfig: FilterConfig | null;
|
||||
order: number;
|
||||
};
|
||||
|
||||
export type WorkflowAction = Selectable<WorkflowActionTable> & {
|
||||
workflowId: string;
|
||||
actionId: string;
|
||||
pluginActionId: string;
|
||||
actionConfig: ActionConfig | null;
|
||||
order: number;
|
||||
};
|
||||
@@ -347,7 +339,6 @@ export const columns = {
|
||||
'asset.originalFileName',
|
||||
'asset.originalPath',
|
||||
'asset.ownerId',
|
||||
'asset.sidecarPath',
|
||||
'asset.type',
|
||||
],
|
||||
assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type'],
|
||||
|
||||
@@ -124,7 +124,6 @@ export type MapAsset = {
|
||||
originalPath: string;
|
||||
owner?: User | null;
|
||||
ownerId: string;
|
||||
sidecarPath: string | null;
|
||||
stack?: Stack | null;
|
||||
stackId: string | null;
|
||||
tags?: Tag[];
|
||||
|
||||
16
server/src/dtos/database-backup.dto.ts
Normal file
16
server/src/dtos/database-backup.dto.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { ApiProperty } from '@nestjs/swagger';
|
||||
import { IsString } from 'class-validator';
|
||||
|
||||
export class DatabaseBackupListResponseDto {
|
||||
backups!: string[];
|
||||
}
|
||||
|
||||
export class DatabaseBackupUploadDto {
|
||||
@ApiProperty({ type: 'string', format: 'binary', required: false })
|
||||
file?: any;
|
||||
}
|
||||
|
||||
export class DatabaseBackupDeleteDto {
|
||||
@IsString({ each: true })
|
||||
backups!: string[];
|
||||
}
|
||||
@@ -1,9 +1,12 @@
|
||||
import { MaintenanceAction } from 'src/enum';
|
||||
import { MaintenanceAction, StorageFolder } from 'src/enum';
|
||||
import { ValidateEnum, ValidateString } from 'src/validation';
|
||||
|
||||
export class SetMaintenanceModeDto {
|
||||
@ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
|
||||
action!: MaintenanceAction;
|
||||
|
||||
@ValidateString({ optional: true })
|
||||
restoreBackupFilename?: string;
|
||||
}
|
||||
|
||||
export class MaintenanceLoginDto {
|
||||
@@ -14,3 +17,26 @@ export class MaintenanceLoginDto {
|
||||
export class MaintenanceAuthDto {
|
||||
username!: string;
|
||||
}
|
||||
|
||||
export class MaintenanceStatusResponseDto {
|
||||
active!: boolean;
|
||||
|
||||
@ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
|
||||
action!: MaintenanceAction;
|
||||
|
||||
progress?: number;
|
||||
task?: string;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export class MaintenanceDetectInstallStorageFolderDto {
|
||||
@ValidateEnum({ enum: StorageFolder, name: 'StorageFolder' })
|
||||
folder!: StorageFolder;
|
||||
readable!: boolean;
|
||||
writable!: boolean;
|
||||
files!: number;
|
||||
}
|
||||
|
||||
export class MaintenanceDetectInstallResponseDto {
|
||||
storage!: MaintenanceDetectInstallStorageFolderDto[];
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ import { Optional, ValidateBoolean, ValidateEnum } from 'src/validation';
|
||||
|
||||
export class WorkflowFilterItemDto {
|
||||
@IsUUID()
|
||||
filterId!: string;
|
||||
pluginFilterId!: string;
|
||||
|
||||
@IsObject()
|
||||
@Optional()
|
||||
@@ -16,7 +16,7 @@ export class WorkflowFilterItemDto {
|
||||
|
||||
export class WorkflowActionItemDto {
|
||||
@IsUUID()
|
||||
actionId!: string;
|
||||
pluginActionId!: string;
|
||||
|
||||
@IsObject()
|
||||
@Optional()
|
||||
@@ -86,7 +86,7 @@ export class WorkflowResponseDto {
|
||||
export class WorkflowFilterResponseDto {
|
||||
id!: string;
|
||||
workflowId!: string;
|
||||
filterId!: string;
|
||||
pluginFilterId!: string;
|
||||
filterConfig!: FilterConfig | null;
|
||||
order!: number;
|
||||
}
|
||||
@@ -94,7 +94,7 @@ export class WorkflowFilterResponseDto {
|
||||
export class WorkflowActionResponseDto {
|
||||
id!: string;
|
||||
workflowId!: string;
|
||||
actionId!: string;
|
||||
pluginActionId!: string;
|
||||
actionConfig!: ActionConfig | null;
|
||||
order!: number;
|
||||
}
|
||||
@@ -103,7 +103,7 @@ export function mapWorkflowFilter(filter: WorkflowFilter): WorkflowFilterRespons
|
||||
return {
|
||||
id: filter.id,
|
||||
workflowId: filter.workflowId,
|
||||
filterId: filter.filterId,
|
||||
pluginFilterId: filter.pluginFilterId,
|
||||
filterConfig: filter.filterConfig,
|
||||
order: filter.order,
|
||||
};
|
||||
@@ -113,7 +113,7 @@ export function mapWorkflowAction(action: WorkflowAction): WorkflowActionRespons
|
||||
return {
|
||||
id: action.id,
|
||||
workflowId: action.workflowId,
|
||||
actionId: action.actionId,
|
||||
pluginActionId: action.pluginActionId,
|
||||
actionConfig: action.actionConfig,
|
||||
order: action.order,
|
||||
};
|
||||
|
||||
@@ -44,6 +44,7 @@ export enum AssetFileType {
|
||||
FullSize = 'fullsize',
|
||||
Preview = 'preview',
|
||||
Thumbnail = 'thumbnail',
|
||||
Sidecar = 'sidecar',
|
||||
}
|
||||
|
||||
export enum AlbumUserRole {
|
||||
@@ -127,6 +128,11 @@ export enum Permission {
|
||||
|
||||
ArchiveRead = 'archive.read',
|
||||
|
||||
BackupList = 'backup.list',
|
||||
BackupDownload = 'backup.download',
|
||||
BackupUpload = 'backup.upload',
|
||||
BackupDelete = 'backup.delete',
|
||||
|
||||
DuplicateRead = 'duplicate.read',
|
||||
DuplicateDelete = 'duplicate.delete',
|
||||
|
||||
@@ -678,12 +684,14 @@ export enum DatabaseLock {
|
||||
MediaLocation = 700,
|
||||
GetSystemConfig = 69,
|
||||
BackupDatabase = 42,
|
||||
MaintenanceOperation = 621,
|
||||
MemoryCreation = 777,
|
||||
}
|
||||
|
||||
export enum MaintenanceAction {
|
||||
Start = 'start',
|
||||
End = 'end',
|
||||
RestoreDatabase = 'restore_database',
|
||||
}
|
||||
|
||||
export enum ExitCode {
|
||||
@@ -830,6 +838,7 @@ export enum ApiTag {
|
||||
Authentication = 'Authentication',
|
||||
AuthenticationAdmin = 'Authentication (admin)',
|
||||
Assets = 'Assets',
|
||||
DatabaseBackups = 'Database Backups (admin)',
|
||||
Deprecated = 'Deprecated',
|
||||
Download = 'Download',
|
||||
Duplicates = 'Duplicates',
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { Kysely } from 'kysely';
|
||||
import { Kysely, sql } from 'kysely';
|
||||
import { CommandFactory } from 'nest-commander';
|
||||
import { ChildProcess, fork } from 'node:child_process';
|
||||
import { dirname, join } from 'node:path';
|
||||
import { Worker } from 'node:worker_threads';
|
||||
import { PostgresError } from 'postgres';
|
||||
import { ImmichAdminModule } from 'src/app.module';
|
||||
import { ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
|
||||
import { DatabaseLock, ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { type DB } from 'src/schema';
|
||||
@@ -35,16 +35,14 @@ class Workers {
|
||||
if (isMaintenanceMode) {
|
||||
this.startWorker(ImmichWorker.Maintenance);
|
||||
} else {
|
||||
await this.waitForFreeLock();
|
||||
|
||||
for (const worker of workers) {
|
||||
this.startWorker(worker);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialise a short-lived Nest application to build configuration
|
||||
* @returns System configuration
|
||||
*/
|
||||
private async isMaintenanceMode(): Promise<boolean> {
|
||||
const { database } = new ConfigRepository().getEnv();
|
||||
const kysely = new Kysely<DB>(getKyselyConfig(database.config));
|
||||
@@ -65,6 +63,32 @@ class Workers {
|
||||
}
|
||||
}
|
||||
|
||||
private async waitForFreeLock() {
|
||||
const { database } = new ConfigRepository().getEnv();
|
||||
const kysely = new Kysely<DB>(getKyselyConfig(database.config));
|
||||
|
||||
let locked = false;
|
||||
while (!locked) {
|
||||
locked = await kysely.connection().execute(async (conn) => {
|
||||
const { rows } = await sql<{
|
||||
pg_try_advisory_lock: boolean;
|
||||
}>`SELECT pg_try_advisory_lock(${DatabaseLock.MaintenanceOperation})`.execute(conn);
|
||||
|
||||
const isLocked = rows[0].pg_try_advisory_lock;
|
||||
|
||||
if (isLocked) {
|
||||
await sql`SELECT pg_advisory_unlock(${DatabaseLock.MaintenanceOperation})`.execute(conn);
|
||||
}
|
||||
|
||||
return isLocked;
|
||||
});
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
}
|
||||
|
||||
await kysely.destroy();
|
||||
}
|
||||
|
||||
/**
|
||||
* Start an individual worker
|
||||
* @param name Worker
|
||||
|
||||
@@ -7,17 +7,24 @@ import {
|
||||
WebSocketServer,
|
||||
} from '@nestjs/websockets';
|
||||
import { Server, Socket } from 'socket.io';
|
||||
import { MaintenanceAuthDto, MaintenanceStatusResponseDto } from 'src/dtos/maintenance.dto';
|
||||
import { AppRepository } from 'src/repositories/app.repository';
|
||||
import { AppRestartEvent, ArgsOf } from 'src/repositories/event.repository';
|
||||
import { AppRestartEvent } from 'src/repositories/event.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
|
||||
export const serverEvents = ['AppRestart'] as const;
|
||||
export type ServerEvents = (typeof serverEvents)[number];
|
||||
|
||||
export interface ClientEventMap {
|
||||
AppRestartV1: [AppRestartEvent];
|
||||
interface ServerEventMap {
|
||||
AppRestart: [AppRestartEvent];
|
||||
MaintenanceStatus: [MaintenanceStatusResponseDto];
|
||||
}
|
||||
|
||||
interface ClientEventMap {
|
||||
AppRestartV1: [AppRestartEvent];
|
||||
MaintenanceStatusV1: [MaintenanceStatusResponseDto];
|
||||
}
|
||||
|
||||
type AuthFn = (client: Socket) => Promise<MaintenanceAuthDto>;
|
||||
type StatusUpdateFn = (status: MaintenanceStatusResponseDto) => void;
|
||||
|
||||
@WebSocketGateway({
|
||||
cors: true,
|
||||
path: '/api/socket.io',
|
||||
@@ -25,8 +32,11 @@ export interface ClientEventMap {
|
||||
})
|
||||
@Injectable()
|
||||
export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGatewayDisconnect, OnGatewayInit {
|
||||
private authFn?: AuthFn;
|
||||
private statusUpdateFn?: StatusUpdateFn;
|
||||
|
||||
@WebSocketServer()
|
||||
private websocketServer?: Server;
|
||||
private server?: Server;
|
||||
|
||||
constructor(
|
||||
private logger: LoggingRepository,
|
||||
@@ -35,25 +45,46 @@ export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGa
|
||||
this.logger.setContext(MaintenanceWebsocketRepository.name);
|
||||
}
|
||||
|
||||
afterInit(websocketServer: Server) {
|
||||
afterInit(server: Server) {
|
||||
this.logger.log('Initialized websocket server');
|
||||
websocketServer.on('AppRestart', () => this.appRepository.exitApp());
|
||||
server.on('AppRestart', () => this.appRepository.exitApp());
|
||||
server.on('MaintenanceStatus', (status) => this.statusUpdateFn?.(status));
|
||||
}
|
||||
|
||||
clientSend<T extends keyof ClientEventMap>(event: T, room: string, ...data: ClientEventMap[T]) {
|
||||
this.server?.to(room).emit(event, ...data);
|
||||
}
|
||||
|
||||
clientBroadcast<T extends keyof ClientEventMap>(event: T, ...data: ClientEventMap[T]) {
|
||||
this.websocketServer?.emit(event, ...data);
|
||||
this.server?.emit(event, ...data);
|
||||
}
|
||||
|
||||
serverSend<T extends ServerEvents>(event: T, ...args: ArgsOf<T>): void {
|
||||
serverSend<T extends keyof ServerEventMap>(event: T, ...args: ServerEventMap[T]): void {
|
||||
this.logger.debug(`Server event: ${event} (send)`);
|
||||
this.websocketServer?.serverSideEmit(event, ...args);
|
||||
this.server?.serverSideEmit(event, ...args);
|
||||
}
|
||||
|
||||
handleConnection(client: Socket) {
|
||||
this.logger.log(`Websocket Connect: ${client.id}`);
|
||||
async handleConnection(client: Socket) {
|
||||
try {
|
||||
await this.authFn!(client);
|
||||
await client.join('private');
|
||||
this.logger.log(`Websocket Connect: ${client.id} (private)`);
|
||||
} catch {
|
||||
await client.join('public');
|
||||
this.logger.log(`Websocket Connect: ${client.id} (public)`);
|
||||
}
|
||||
}
|
||||
|
||||
handleDisconnect(client: Socket) {
|
||||
async handleDisconnect(client: Socket) {
|
||||
this.logger.log(`Websocket Disconnect: ${client.id}`);
|
||||
await Promise.allSettled([client.leave('private'), client.leave('public')]);
|
||||
}
|
||||
|
||||
setAuthFn(fn: (client: Socket) => Promise<MaintenanceAuthDto>) {
|
||||
this.authFn = fn;
|
||||
}
|
||||
|
||||
setStatusUpdateFn(fn: (status: MaintenanceStatusResponseDto) => void) {
|
||||
this.statusUpdateFn = fn;
|
||||
}
|
||||
}
|
||||
|
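The gateway above sorts every socket into a "private" or "public" room at connect time, and (per the tests further down) only private sockets see the real error text. A hedged client-side sketch with socket.io-client; the URL is a placeholder, the path matches the gateway config:

import { io } from 'socket.io-client';

const socket = io('https://immich.example.com', { path: '/api/socket.io' });

// MaintenanceStatusV1 carries MaintenanceStatusResponseDto; unauthenticated
// sockets receive the redacted "Something went wrong, see logs!" error text.
socket.on('MaintenanceStatusV1', (status) => {
  console.log(status.active, status.action, status.task ?? '');
});
socket.on('AppRestartV1', () => socket.disconnect());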
||||
@@ -1,23 +1,109 @@
|
||||
import { Body, Controller, Get, Post, Req, Res } from '@nestjs/common';
|
||||
import { Request, Response } from 'express';
|
||||
import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
Delete,
|
||||
Get,
|
||||
Next,
|
||||
Param,
|
||||
Post,
|
||||
Req,
|
||||
Res,
|
||||
UploadedFile,
|
||||
UseInterceptors,
|
||||
} from '@nestjs/common';
|
||||
import { FileInterceptor } from '@nestjs/platform-express';
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import {
|
||||
MaintenanceAuthDto,
|
||||
MaintenanceDetectInstallResponseDto,
|
||||
MaintenanceLoginDto,
|
||||
MaintenanceStatusResponseDto,
|
||||
SetMaintenanceModeDto,
|
||||
} from 'src/dtos/maintenance.dto';
|
||||
import { ServerConfigDto } from 'src/dtos/server.dto';
|
||||
import { ImmichCookie, MaintenanceAction } from 'src/enum';
|
||||
import { ImmichCookie } from 'src/enum';
|
||||
import { MaintenanceRoute } from 'src/maintenance/maintenance-auth.guard';
|
||||
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
|
||||
import { GetLoginDetails } from 'src/middleware/auth.guard';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { LoginDetails } from 'src/services/auth.service';
|
||||
import { sendFile } from 'src/utils/file';
|
||||
import { respondWithCookie } from 'src/utils/response';
|
||||
import { FilenameParamDto } from 'src/validation';
|
||||
|
||||
import type { DatabaseBackupController as _DatabaseBackupController } from 'src/controllers/database-backup.controller';
|
||||
import type { ServerController as _ServerController } from 'src/controllers/server.controller';
|
||||
import { DatabaseBackupDeleteDto, DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
|
||||
|
||||
@Controller()
|
||||
export class MaintenanceWorkerController {
|
||||
constructor(private service: MaintenanceWorkerService) {}
|
||||
constructor(
|
||||
private logger: LoggingRepository,
|
||||
private service: MaintenanceWorkerService,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* {@link _ServerController.getServerConfig }
|
||||
*/
|
||||
@Get('server/config')
|
||||
getServerConfig(): Promise<ServerConfigDto> {
|
||||
getServerConfig(): ServerConfigDto {
|
||||
return this.service.getSystemConfig();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.listDatabaseBackups}
|
||||
*/
|
||||
@Get('admin/database-backups')
|
||||
@MaintenanceRoute()
|
||||
listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
|
||||
return this.service.listBackups();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.downloadDatabaseBackup}
|
||||
*/
|
||||
@Get('admin/database-backups/:filename')
|
||||
@MaintenanceRoute()
|
||||
async downloadDatabaseBackup(
|
||||
@Param() { filename }: FilenameParamDto,
|
||||
@Res() res: Response,
|
||||
@Next() next: NextFunction,
|
||||
) {
|
||||
await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.deleteDatabaseBackup}
|
||||
*/
|
||||
@Delete('admin/database-backups')
|
||||
@MaintenanceRoute()
|
||||
async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
|
||||
return this.service.deleteBackup(dto.backups);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.uploadDatabaseBackup}
|
||||
*/
|
||||
@Post('admin/database-backups/upload')
|
||||
@MaintenanceRoute()
|
||||
@UseInterceptors(FileInterceptor('file'))
|
||||
uploadDatabaseBackup(
|
||||
@UploadedFile()
|
||||
file: Express.Multer.File,
|
||||
): Promise<void> {
|
||||
return this.service.uploadBackup(file);
|
||||
}
|
||||
|
||||
@Get('admin/maintenance/status')
|
||||
maintenanceStatus(@Req() request: Request): Promise<MaintenanceStatusResponseDto> {
|
||||
return this.service.status(request.cookies[ImmichCookie.MaintenanceToken]);
|
||||
}
|
||||
|
||||
@Get('admin/maintenance/detect-install')
|
||||
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
|
||||
return this.service.detectPriorInstall();
|
||||
}
|
||||
|
||||
@Post('admin/maintenance/login')
|
||||
async maintenanceLogin(
|
||||
@Req() request: Request,
|
||||
@@ -35,9 +121,7 @@ export class MaintenanceWorkerController {
|
||||
|
||||
@Post('admin/maintenance')
|
||||
@MaintenanceRoute()
|
||||
async setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): Promise<void> {
|
||||
if (dto.action === MaintenanceAction.End) {
|
||||
await this.service.endMaintenance();
|
||||
}
|
||||
setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): void {
|
||||
void this.service.setAction(dto);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,25 +1,44 @@
|
||||
import { UnauthorizedException } from '@nestjs/common';
|
||||
import { BadRequestException, UnauthorizedException } from '@nestjs/common';
|
||||
import { SignJWT } from 'jose';
|
||||
import { SystemMetadataKey } from 'src/enum';
|
||||
import { DateTime } from 'luxon';
|
||||
import { PassThrough, Readable } from 'node:stream';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { MaintenanceAction, StorageFolder, SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
|
||||
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
|
||||
import { automock, getMocks, ServiceMocks } from 'test/utils';
|
||||
import { automock, AutoMocked, getMocks, mockDuplex, mockSpawn, ServiceMocks } from 'test/utils';
|
||||
|
||||
function* mockData() {
|
||||
yield '';
|
||||
}
|
||||
|
||||
describe(MaintenanceWorkerService.name, () => {
|
||||
let sut: MaintenanceWorkerService;
|
||||
let mocks: ServiceMocks;
|
||||
let maintenanceWorkerRepositoryMock: MaintenanceWebsocketRepository;
|
||||
let maintenanceWebsocketRepositoryMock: AutoMocked<MaintenanceWebsocketRepository>;
|
||||
|
||||
beforeEach(() => {
|
||||
mocks = getMocks();
|
||||
maintenanceWorkerRepositoryMock = automock(MaintenanceWebsocketRepository, { args: [mocks.logger], strict: false });
|
||||
maintenanceWebsocketRepositoryMock = automock(MaintenanceWebsocketRepository, {
|
||||
args: [mocks.logger],
|
||||
strict: false,
|
||||
});
|
||||
|
||||
sut = new MaintenanceWorkerService(
|
||||
mocks.logger as never,
|
||||
mocks.app,
|
||||
mocks.config,
|
||||
mocks.systemMetadata as never,
|
||||
maintenanceWorkerRepositoryMock,
|
||||
maintenanceWebsocketRepositoryMock,
|
||||
mocks.storage as never,
|
||||
mocks.process,
|
||||
mocks.database as never,
|
||||
);
|
||||
|
||||
sut.mock({
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
});
|
||||
});
|
||||
|
||||
it('should work', () => {
|
||||
@@ -27,14 +46,43 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
|
||||
describe('getSystemConfig', () => {
|
||||
it('should respond the server is in maintenance mode', async () => {
|
||||
await expect(sut.getSystemConfig()).resolves.toMatchObject(
|
||||
it('should respond the server is in maintenance mode', () => {
|
||||
expect(sut.getSystemConfig()).toMatchObject(
|
||||
expect.objectContaining({
|
||||
maintenanceMode: true,
|
||||
}),
|
||||
);
|
||||
|
||||
expect(mocks.systemMetadata.get).toHaveBeenCalled();
|
||||
expect(mocks.systemMetadata.get).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe.skip('ssr');
|
||||
describe.skip('detectMediaLocation');
|
||||
|
||||
describe('setStatus', () => {
|
||||
it('should broadcast status', () => {
|
||||
sut.setStatus({
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
task: 'abc',
|
||||
error: 'def',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalled();
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledTimes(2);
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: 'start',
|
||||
task: 'abc',
|
||||
error: 'def',
|
||||
});
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'public', {
|
||||
active: true,
|
||||
action: 'start',
|
||||
task: 'abc',
|
||||
error: 'Something went wrong, see logs!',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -42,7 +90,14 @@ describe(MaintenanceWorkerService.name, () => {
  const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;

  it('should log a valid login URL', async () => {
    mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
    mocks.systemMetadata.get.mockResolvedValue({
      isMaintenanceMode: true,
      secret: 'secret',
      action: {
        action: MaintenanceAction.Start,
      },
    });

    await expect(sut.logSecret()).resolves.toBeUndefined();
    expect(mocks.logger.log).toHaveBeenCalledWith(expect.stringMatching(RE_LOGIN_URL));

@@ -63,7 +118,13 @@ describe(MaintenanceWorkerService.name, () => {
  });

  it('should parse cookie properly', async () => {
    mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
    mocks.systemMetadata.get.mockResolvedValue({
      isMaintenanceMode: true,
      secret: 'secret',
      action: {
        action: MaintenanceAction.Start,
      },
    });

    await expect(
      sut.authenticate({
@@ -73,13 +134,102 @@ describe(MaintenanceWorkerService.name, () => {
    });
  });

  describe('status', () => {
    beforeEach(() => {
      sut.mock({
        active: true,
        action: MaintenanceAction.Start,
        error: 'secret value!',
      });
    });

    it('generates private status', async () => {
      const jwt = await new SignJWT({ _mockValue: true })
        .setProtectedHeader({ alg: 'HS256' })
        .setIssuedAt()
        .setExpirationTime('4h')
        .sign(new TextEncoder().encode('secret'));

      await expect(sut.status(jwt)).resolves.toEqual(
        expect.objectContaining({
          error: 'secret value!',
        }),
      );
    });

    it('generates public status', async () => {
      await expect(sut.status()).resolves.toEqual(
        expect.objectContaining({
          error: 'Something went wrong, see logs!',
        }),
      );
    });
  });

  describe('integrityCheck', () => {
    it('generate integrity report', async () => {
      mocks.storage.readdir.mockResolvedValue(['.immich', 'file1', 'file2']);
      mocks.storage.readFile.mockResolvedValue(undefined as never);
      mocks.storage.overwriteFile.mockRejectedValue(undefined as never);

      await expect(sut.detectPriorInstall()).resolves.toMatchInlineSnapshot(`
        {
          "storage": [
            {
              "files": 2,
              "folder": "encoded-video",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "library",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "upload",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "profile",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "thumbs",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "backups",
              "readable": true,
              "writable": false,
            },
          ],
        }
      `);
    });
  });

  describe('login', () => {
    it('should fail without token', async () => {
      await expect(sut.login()).rejects.toThrowError(new UnauthorizedException('Missing JWT Token'));
    });

    it('should fail with expired JWT', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
      mocks.systemMetadata.get.mockResolvedValue({
        isMaintenanceMode: true,
        secret: 'secret',
        action: {
          action: MaintenanceAction.Start,
        },
      });

      const jwt = await new SignJWT({})
        .setProtectedHeader({ alg: 'HS256' })
@@ -91,7 +241,13 @@ describe(MaintenanceWorkerService.name, () => {
    });

    it('should succeed with valid JWT', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
      mocks.systemMetadata.get.mockResolvedValue({
        isMaintenanceMode: true,
        secret: 'secret',
        action: {
          action: MaintenanceAction.Start,
        },
      });

      const jwt = await new SignJWT({ _mockValue: true })
        .setProtectedHeader({ alg: 'HS256' })
@@ -107,22 +263,232 @@ describe(MaintenanceWorkerService.name, () => {
    });
  });

  describe('endMaintenance', () => {
  describe.skip('setAction'); // just calls setStatus+runAction

  /**
   * Actions
   */

  describe('action: start', () => {
    it('should not do anything', async () => {
      await sut.runAction({
        action: MaintenanceAction.Start,
      });

      expect(mocks.logger.log).toHaveBeenCalledTimes(0);
    });
  });

  describe('action: end', () => {
    it('should set maintenance mode', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: false });
      await expect(sut.endMaintenance()).resolves.toBeUndefined();
      await sut.runAction({
        action: MaintenanceAction.End,
      });

      expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
        isMaintenanceMode: false,
      });

      expect(maintenanceWorkerRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
      expect(maintenanceWebsocketRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
        isMaintenanceMode: false,
      });

      expect(maintenanceWorkerRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
      expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
        isMaintenanceMode: false,
      });
    });
  });

  describe('action: restore database', () => {
    beforeEach(() => {
      mocks.database.tryLock.mockResolvedValueOnce(true);

      mocks.storage.readdir.mockResolvedValue([]);
      mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
      mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
      mocks.storage.rename.mockResolvedValue();
      mocks.storage.unlink.mockResolvedValue();
      mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
      mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
      mocks.storage.createGzip.mockReturnValue(new PassThrough());
      mocks.storage.createGunzip.mockReturnValue(new PassThrough());
    });

    it('should update maintenance mode state', async () => {
      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'filename',
      });

      expect(mocks.database.tryLock).toHaveBeenCalled();
      expect(mocks.logger.log).toHaveBeenCalledWith('Running maintenance action restore_database');

      expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
        isMaintenanceMode: true,
        secret: 'secret',
        action: {
          action: 'start',
        },
      });
    });

    it('should fail to restore invalid backup', async () => {
      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'filename',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        error: 'Error: Invalid backup file format!',
        task: 'error',
      });
    });

    it('should successfully run a backup', async () => {
      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'development-filename.sql',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith(
        'MaintenanceStatusV1',
        expect.any(String),
        {
          active: true,
          action: MaintenanceAction.RestoreDatabase,
          task: 'ready',
          progress: expect.any(Number),
        },
      );

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
        'MaintenanceStatusV1',
        expect.any(String),
        {
          active: true,
          action: 'end',
        },
      );
    });

    it('should fail if backup creation fails', async () => {
      mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));

      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'development-filename.sql',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        error: 'Error: pg_dump non-zero exit code (1)\nerror',
        task: 'error',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
        'MaintenanceStatusV1',
        expect.any(String),
        expect.objectContaining({
          task: 'error',
        }),
      );
    });

    it('should fail if restore itself fails', async () => {
      mocks.process.createSpawnDuplexStream
        .mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''))
        .mockReturnValueOnce(mockDuplex('gzip', 0, 'data', ''))
        .mockReturnValueOnce(mockDuplex('psql', 1, '', 'error'));

      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'development-filename.sql',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        error: 'Error: psql non-zero exit code (1)\nerror',
        task: 'error',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
        'MaintenanceStatusV1',
        expect.any(String),
        expect.objectContaining({
          task: 'error',
        }),
      );
    });
  });

  /**
   * Backups
   */

  describe('listBackups', () => {
    it('should give us all backups', async () => {
      mocks.storage.readdir.mockResolvedValue([
        `immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
        `immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
        'immich-db-backup-1753789649000.sql.gz',
        `immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
      ]);

      await expect(sut.listBackups()).resolves.toMatchObject({
        backups: [
          'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz',
          'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz',
          'immich-db-backup-1753789649000.sql.gz',
        ],
      });
    });
  });

  describe('deleteBackup', () => {
    it('should reject invalid file names', async () => {
      await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
        new BadRequestException('Invalid backup name!'),
      );
    });

    it('should unlink the target file', async () => {
      await sut.deleteBackup(['filename.sql']);
      expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
      expect(mocks.storage.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
      );
    });
  });

  describe('uploadBackup', () => {
    it('should reject invalid file names', async () => {
      await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
        new BadRequestException('Invalid backup name!'),
      );
    });

    it('should write file', async () => {
      await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
      expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
    });
  });

  describe('downloadBackup', () => {
    it('should reject invalid file names', () => {
      expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
    });

    it('should get backup path', () => {
      expect(sut.downloadBackup('hello.sql.gz')).toEqual(
        expect.objectContaining({
          path: '/data/backups/hello.sql.gz',
        }),
      );
    });
  });
});

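For reference, the login flow exercised by these tests is a plain HS256 round-trip with jose, exactly as the SignJWT/jwtVerify calls above show. A minimal self-contained sketch (the secret and payload values here are illustrative, not the production ones):

import { SignJWT, jwtVerify } from 'jose';

const secret = new TextEncoder().encode('secret');

// Sign the maintenance token the same way logSecret() embeds it in the URL.
const token = await new SignJWT({ username: 'immich-admin' })
  .setProtectedHeader({ alg: 'HS256' })
  .setIssuedAt()
  .setExpirationTime('4h')
  .sign(secret);

// jwtVerify rejects on a bad signature or expiry, which login() maps to
// UnauthorizedException('Invalid JWT Token').
const { payload } = await jwtVerify(token, secret);
console.log(payload.username); // 'immich-admin'
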
@@ -4,19 +4,38 @@ import { NextFunction, Request, Response } from 'express';
import { jwtVerify } from 'jose';
import { readFileSync } from 'node:fs';
import { IncomingHttpHeaders } from 'node:http';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { ImmichCookie, SystemMetadataKey } from 'src/enum';
import { StorageCore } from 'src/cores/storage.core';
import {
  MaintenanceAuthDto,
  MaintenanceDetectInstallResponseDto,
  MaintenanceStatusResponseDto,
  SetMaintenanceModeDto,
} from 'src/dtos/maintenance.dto';
import { ServerConfigDto } from 'src/dtos/server.dto';
import { DatabaseLock, ImmichCookie, MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
import { AppRepository } from 'src/repositories/app.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { type ApiService as _ApiService } from 'src/services/api.service';
import { type BaseService as _BaseService } from 'src/services/base.service';
import { type DatabaseBackupService as _DatabaseBackupService } from 'src/services/database-backup.service';
import { type ServerService as _ServerService } from 'src/services/server.service';
import { MaintenanceModeState } from 'src/types';
import { getConfig } from 'src/utils/config';
import { createMaintenanceLoginUrl } from 'src/utils/maintenance';
import {
  deleteDatabaseBackup,
  downloadDatabaseBackup,
  listDatabaseBackups,
  restoreDatabaseBackup,
  uploadDatabaseBackup,
} from 'src/utils/database-backups';
import { ImmichFileResponse } from 'src/utils/file';
import { createMaintenanceLoginUrl, detectPriorInstall } from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';

/**
@@ -24,16 +43,50 @@ import { getExternalDomain } from 'src/utils/misc';
 */
@Injectable()
export class MaintenanceWorkerService {
  #secret: string = null!;
  #status: MaintenanceStatusResponseDto = {
    active: true,
    action: MaintenanceAction.Start,
  };

  constructor(
    protected logger: LoggingRepository,
    private appRepository: AppRepository,
    private configRepository: ConfigRepository,
    private systemMetadataRepository: SystemMetadataRepository,
    private maintenanceWorkerRepository: MaintenanceWebsocketRepository,
    private maintenanceWebsocketRepository: MaintenanceWebsocketRepository,
    private storageRepository: StorageRepository,
    private processRepository: ProcessRepository,
    private databaseRepository: DatabaseRepository,
  ) {
    this.logger.setContext(this.constructor.name);
  }

  mock(status: MaintenanceStatusResponseDto) {
    this.#secret = 'secret';
    this.#status = status;
  }

  async init() {
    const state = (await this.systemMetadataRepository.get(
      SystemMetadataKey.MaintenanceMode,
    )) as MaintenanceModeState & { isMaintenanceMode: true };

    this.#secret = state.secret;
    this.#status = {
      active: true,
      action: state.action.action,
    };

    StorageCore.setMediaLocation(this.detectMediaLocation());

    this.maintenanceWebsocketRepository.setAuthFn(async (client) => this.authenticate(client.request.headers));
    this.maintenanceWebsocketRepository.setStatusUpdateFn((status) => (this.#status = status));

    await this.logSecret();
    void this.runAction(state.action);
  }

  /**
   * {@link _BaseService.configRepos}
   */
@@ -55,22 +108,10 @@ export class MaintenanceWorkerService {
  /**
   * {@link _ServerService.getSystemConfig}
   */
  async getSystemConfig() {
    const config = await this.getConfig({ withCache: false });

  getSystemConfig() {
    return {
      loginPageMessage: config.server.loginPageMessage,
      trashDays: config.trash.days,
      userDeleteDelay: config.user.deleteDelay,
      oauthButtonText: config.oauth.buttonText,
      isInitialized: true,
      isOnboarded: true,
      externalDomain: config.server.externalDomain,
      publicUsers: config.server.publicUsers,
      mapDarkStyleUrl: config.map.darkStyle,
      mapLightStyleUrl: config.map.lightStyle,
      maintenanceMode: true,
    };
    } as ServerConfigDto;
  }

  /**
@@ -106,12 +147,89 @@ export class MaintenanceWorkerService {
    };
  }

  private async secret(): Promise<string> {
    const state = (await this.systemMetadataRepository.get(SystemMetadataKey.MaintenanceMode)) as {
      secret: string;
    };
  /**
   * {@link _StorageService.detectMediaLocation}
   */
  detectMediaLocation(): string {
    const envData = this.configRepository.getEnv();
    if (envData.storage.mediaLocation) {
      return envData.storage.mediaLocation;
    }

    return state.secret;
    const targets: string[] = [];
    const candidates = ['/data', '/usr/src/app/upload'];

    for (const candidate of candidates) {
      const exists = this.storageRepository.existsSync(candidate);
      if (exists) {
        targets.push(candidate);
      }
    }

    if (targets.length === 1) {
      return targets[0];
    }

    return '/usr/src/app/upload';
  }

  /**
   * {@link _DatabaseBackupService.listBackups}
   */
  async listBackups(): Promise<{ backups: string[] }> {
    return { backups: await listDatabaseBackups(this.backupRepos) };
  }

  /**
   * {@link _DatabaseBackupService.deleteBackup}
   */
  async deleteBackup(files: string[]): Promise<void> {
    return deleteDatabaseBackup(this.backupRepos, files);
  }

  /**
   * {@link _DatabaseBackupService.uploadBackup}
   */
  async uploadBackup(file: Express.Multer.File): Promise<void> {
    return uploadDatabaseBackup(this.backupRepos, file);
  }

  /**
   * {@link _DatabaseBackupService.downloadBackup}
   */
  downloadBackup(fileName: string): ImmichFileResponse {
    return downloadDatabaseBackup(fileName);
  }

  private get backupRepos() {
    return {
      logger: this.logger,
      storage: this.storageRepository,
      config: this.configRepository,
      process: this.processRepository,
      database: this.databaseRepository,
    };
  }

  private getStatus(): MaintenanceStatusResponseDto {
    return this.#status;
  }

  private getPublicStatus(): MaintenanceStatusResponseDto {
    const state = structuredClone(this.#status);

    if (state.error) {
      state.error = 'Something went wrong, see logs!';
    }

    return state;
  }

  setStatus(status: MaintenanceStatusResponseDto): void {
    this.#status = status;
    this.maintenanceWebsocketRepository.serverSend('MaintenanceStatus', status);
    this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'private', status);
    this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'public', this.getPublicStatus());
  }

  async logSecret(): Promise<void> {
@@ -123,7 +241,7 @@ export class MaintenanceWorkerService {
      {
        username: 'immich-admin',
      },
      await this.secret(),
      this.#secret,
    );

    this.logger.log(`\n\n🚧 Immich is in maintenance mode, you can log in using the following URL:\n${url}\n`);
@@ -134,28 +252,120 @@ export class MaintenanceWorkerService {
    return this.login(jwtToken);
  }

  async status(potentiallyJwt?: string): Promise<MaintenanceStatusResponseDto> {
    try {
      await this.login(potentiallyJwt);
      return this.getStatus();
    } catch {
      return this.getPublicStatus();
    }
  }

  detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
    return detectPriorInstall(this.storageRepository);
  }

  async login(jwt?: string): Promise<MaintenanceAuthDto> {
    if (!jwt) {
      throw new UnauthorizedException('Missing JWT Token');
    }

    const secret = await this.secret();

    try {
      const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(secret));
      const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(this.#secret));
      return result.payload;
    } catch {
      throw new UnauthorizedException('Invalid JWT Token');
    }
  }

  async endMaintenance(): Promise<void> {
  async setAction(action: SetMaintenanceModeDto) {
    this.setStatus({
      active: true,
      action: action.action,
    });

    await this.runAction(action);
  }

  async runAction(action: SetMaintenanceModeDto) {
    switch (action.action) {
      case MaintenanceAction.Start: {
        return;
      }
      case MaintenanceAction.End: {
        return this.endMaintenance();
      }
      case MaintenanceAction.RestoreDatabase: {
        if (!action.restoreBackupFilename) {
          return;
        }

        break;
      }
    }

    const lock = await this.databaseRepository.tryLock(DatabaseLock.MaintenanceOperation);
    if (!lock) {
      return;
    }

    this.logger.log(`Running maintenance action ${action.action}`);

    await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
      isMaintenanceMode: true,
      secret: this.#secret,
      action: {
        action: MaintenanceAction.Start,
      },
    });

    try {
      switch (action.action) {
        case MaintenanceAction.RestoreDatabase: {
          await this.restoreBackup(action.restoreBackupFilename);
          break;
        }
      }
    } catch (error) {
      this.logger.error(`Encountered error running action: ${error}`);
      this.setStatus({
        active: true,
        action: action.action,
        task: 'error',
        error: '' + error,
      });
    }
  }

  private async restoreBackup(filename: string): Promise<void> {
    this.setStatus({
      active: true,
      action: MaintenanceAction.RestoreDatabase,
      task: 'ready',
      progress: 0,
    });

    await restoreDatabaseBackup(this.backupRepos, filename, (task, progress) =>
      this.setStatus({
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        progress,
        task,
      }),
    );

    await this.setAction({
      action: MaintenanceAction.End,
    });
  }

  private async endMaintenance(): Promise<void> {
    const state: MaintenanceModeState = { isMaintenanceMode: false as const };
    await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, state);

    // => corresponds to notification.service.ts#onAppRestart
    this.maintenanceWorkerRepository.clientBroadcast('AppRestartV1', state);
    this.maintenanceWorkerRepository.serverSend('AppRestart', state);
    this.maintenanceWebsocketRepository.clientBroadcast('AppRestartV1', state);
    this.maintenanceWebsocketRepository.serverSend('AppRestart', state);
    this.appRepository.exitApp();
  }
}

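To make the private/public split in setStatus() concrete: a hypothetical client listening for the status events it emits. The 'MaintenanceStatusV1' event name comes from the code above; the socket.io client setup and URL are assumptions for illustration only.

import { io } from 'socket.io-client';

const socket = io('https://my.immich.app', { withCredentials: true });

socket.on('MaintenanceStatusV1', (status: { active: boolean; action: string; task?: string; error?: string }) => {
  // Unauthenticated ('public') listeners only ever receive the sanitized
  // error string; the raw error goes to the 'private' room.
  console.log(`maintenance ${status.action}: ${status.task ?? 'ok'}`, status.error ?? '');
});
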
@@ -20,8 +20,23 @@ limit
-- AssetJobRepository.getForSidecarWriteJob
select
  "id",
  "sidecarPath",
  "originalPath",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files",
  (
    select
      coalesce(json_agg(agg), '[]')
@@ -39,21 +54,36 @@ select
from
  "asset"
where
  "asset"."id" = $1::uuid
  "asset"."id" = $2::uuid
limit
  $2
  $3

-- AssetJobRepository.getForSidecarCheckJob
select
  "id",
  "sidecarPath",
  "originalPath"
  "originalPath",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files"
from
  "asset"
where
  "asset"."id" = $1::uuid
  "asset"."id" = $2::uuid
limit
  $2
  $3

-- AssetJobRepository.streamForThumbnailJob
select
@@ -158,7 +188,6 @@ select
  "asset"."originalFileName",
  "asset"."originalPath",
  "asset"."ownerId",
  "asset"."sidecarPath",
  "asset"."type",
  (
    select
@@ -173,11 +202,27 @@ select
      "asset_face"."assetId" = "asset"."id"
      and "asset_face"."deletedAt" is null
  ) as agg
  ) as "faces"
  ) as "faces",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files"
from
  "asset"
where
  "asset"."id" = $1
  "asset"."id" = $2

-- AssetJobRepository.getAlbumThumbnailFiles
select
@@ -322,7 +367,6 @@ select
  "asset"."libraryId",
  "asset"."ownerId",
  "asset"."livePhotoVideoId",
  "asset"."sidecarPath",
  "asset"."encodedVideoPath",
  "asset"."originalPath",
  to_json("asset_exif") as "exifInfo",
@@ -433,18 +477,33 @@ select
  "asset"."checksum",
  "asset"."originalPath",
  "asset"."isExternal",
  "asset"."sidecarPath",
  "asset"."originalFileName",
  "asset"."livePhotoVideoId",
  "asset"."fileCreatedAt",
  "asset_exif"."timeZone",
  "asset_exif"."fileSizeInByte"
  "asset_exif"."fileSizeInByte",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files"
from
  "asset"
  inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
  "asset"."deletedAt" is null
  and "asset"."id" = $1
  and "asset"."id" = $2

-- AssetJobRepository.streamForStorageTemplateJob
select
@@ -454,12 +513,27 @@ select
  "asset"."checksum",
  "asset"."originalPath",
  "asset"."isExternal",
  "asset"."sidecarPath",
  "asset"."originalFileName",
  "asset"."livePhotoVideoId",
  "asset"."fileCreatedAt",
  "asset_exif"."timeZone",
  "asset_exif"."fileSizeInByte"
  "asset_exif"."fileSizeInByte",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files"
from
  "asset"
  inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
@@ -481,11 +555,15 @@ select
from
  "asset"
where
  (
    "asset"."sidecarPath" = $1
    or "asset"."sidecarPath" is null
  not exists (
    select
      "asset_file"."id"
    from
      "asset_file"
    where
      "asset_file"."assetId" = "asset"."id"
      and "asset_file"."type" = $1
  )
  and "asset"."visibility" != $2

-- AssetJobRepository.streamForDetectFacesJob
select

@@ -216,6 +216,34 @@ from
limit
  3

-- AssetRepository.getForCopy
select
  "id",
  "stackId",
  "originalPath",
  "isFavorite",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
      ) as agg
  ) as "files"
from
  "asset"
where
  "id" = $1::uuid
limit
  $2

-- AssetRepository.getById
select
  "asset".*

@@ -6,7 +6,6 @@ import { Asset, columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, AssetType, AssetVisibility } from 'src/enum';
import { DB } from 'src/schema';
import { StorageAsset } from 'src/types';
import {
  anyUuid,
  asUuid,
@@ -40,7 +39,8 @@ export class AssetJobRepository {
    return this.db
      .selectFrom('asset')
      .where('asset.id', '=', asUuid(id))
      .select(['id', 'sidecarPath', 'originalPath'])
      .select(['id', 'originalPath'])
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .select((eb) =>
        jsonArrayFrom(
          eb
@@ -59,7 +59,8 @@ export class AssetJobRepository {
    return this.db
      .selectFrom('asset')
      .where('asset.id', '=', asUuid(id))
      .select(['id', 'sidecarPath', 'originalPath'])
      .select(['id', 'originalPath'])
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .limit(1)
      .executeTakeFirst();
  }
@@ -122,6 +123,7 @@ export class AssetJobRepository {
      .selectFrom('asset')
      .select(columns.asset)
      .select(withFaces)
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .where('asset.id', '=', id)
      .executeTakeFirst();
  }
@@ -228,7 +230,6 @@ export class AssetJobRepository {
        'asset.libraryId',
        'asset.ownerId',
        'asset.livePhotoVideoId',
        'asset.sidecarPath',
        'asset.encodedVideoPath',
        'asset.originalPath',
      ])
@@ -306,26 +307,24 @@ export class AssetJobRepository {
        'asset.checksum',
        'asset.originalPath',
        'asset.isExternal',
        'asset.sidecarPath',
        'asset.originalFileName',
        'asset.livePhotoVideoId',
        'asset.fileCreatedAt',
        'asset_exif.timeZone',
        'asset_exif.fileSizeInByte',
      ])
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .where('asset.deletedAt', 'is', null);
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getForStorageTemplateJob(id: string): Promise<StorageAsset | undefined> {
    return this.storageTemplateAssetQuery().where('asset.id', '=', id).executeTakeFirst() as Promise<
      StorageAsset | undefined
    >;
  getForStorageTemplateJob(id: string) {
    return this.storageTemplateAssetQuery().where('asset.id', '=', id).executeTakeFirst();
  }

  @GenerateSql({ params: [], stream: true })
  streamForStorageTemplateJob() {
    return this.storageTemplateAssetQuery().stream() as AsyncIterableIterator<StorageAsset>;
    return this.storageTemplateAssetQuery().stream();
  }

  @GenerateSql({ params: [DummyValue.DATE], stream: true })
@@ -343,9 +342,18 @@ export class AssetJobRepository {
      .selectFrom('asset')
      .select(['asset.id'])
      .$if(!force, (qb) =>
        qb.where((eb) => eb.or([eb('asset.sidecarPath', '=', ''), eb('asset.sidecarPath', 'is', null)])),
        qb.where((eb) =>
          eb.not(
            eb.exists(
              eb
                .selectFrom('asset_file')
                .select('asset_file.id')
                .whereRef('asset_file.assetId', '=', 'asset.id')
                .where('asset_file.type', '=', AssetFileType.Sidecar),
            ),
          ),
        ),
      )
      .where('asset.visibility', '!=', AssetVisibility.Hidden)
      .stream();
  }


@@ -396,6 +396,17 @@ export class AssetRepository {
    return this.db.selectFrom('asset_file').select(['assetId', 'path']).limit(sql.lit(3)).execute();
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getForCopy(id: string) {
    return this.db
      .selectFrom('asset')
      .select(['id', 'stackId', 'originalPath', 'isFavorite'])
      .select(withFiles)
      .where('id', '=', asUuid(id))
      .limit(1)
      .executeTakeFirst();
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
    return this.db
@@ -842,6 +853,10 @@ export class AssetRepository {
      .execute();
  }

  async deleteFile({ assetId, type }: { assetId: string; type: AssetFileType }): Promise<void> {
    await this.db.deleteFrom('asset_file').where('assetId', '=', asUuid(assetId)).where('type', '=', type).execute();
  }

  async deleteFiles(files: Pick<Selectable<AssetFileTable>, 'id'>[]): Promise<void> {
    if (files.length === 0) {
      return;

@@ -403,7 +403,6 @@ export class DatabaseRepository {
      .set((eb) => ({
        originalPath: eb.fn('REGEXP_REPLACE', ['originalPath', source, target]),
        encodedVideoPath: eb.fn('REGEXP_REPLACE', ['encodedVideoPath', source, target]),
        sidecarPath: eb.fn('REGEXP_REPLACE', ['sidecarPath', source, target]),
      }))
      .execute();

@@ -24,9 +24,8 @@ export class OAuthRepository {
  }

  async authorize(config: OAuthConfig, redirectUrl: string, state?: string, codeChallenge?: string) {
    const { buildAuthorizationUrl, randomState, randomPKCECodeVerifier, calculatePKCECodeChallenge } = await import(
      'openid-client'
    );
    const { buildAuthorizationUrl, randomState, randomPKCECodeVerifier, calculatePKCECodeChallenge } =
      await import('openid-client');
    const client = await this.getClient(config);
    state ??= randomState();

server/src/repositories/process.repository.spec.ts (new file, +85)
@@ -0,0 +1,85 @@
import { ChildProcessWithoutNullStreams } from 'node:child_process';
import { Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { ProcessRepository } from 'src/repositories/process.repository';

function* data() {
  yield 'Hello, world!';
}

describe(ProcessRepository.name, () => {
  let sut: ProcessRepository;
  let sink: Writable;

  beforeAll(() => {
    sut = new ProcessRepository();
  });

  beforeEach(() => {
    sink = new Writable({
      write(_chunk, _encoding, callback) {
        callback();
      },

      final(callback) {
        callback();
      },
    });
  });

  describe('createSpawnDuplexStream', () => {
    it('should work (drain to stdout)', async () => {
      const process = sut.createSpawnDuplexStream('bash', ['-c', 'exit 0']);
      await pipeline(process, sink);
    });

    it('should throw on non-zero exit code', async () => {
      const process = sut.createSpawnDuplexStream('bash', ['-c', 'echo "error message" >&2; exit 1']);
      await expect(pipeline(process, sink)).rejects.toThrowErrorMatchingInlineSnapshot(`
        [Error: bash non-zero exit code (1)
        error message
        ]
      `);
    });

    it('should accept stdin / output stdout', async () => {
      let output = '';
      const sink = new Writable({
        write(chunk, _encoding, callback) {
          output += chunk;
          callback();
        },

        final(callback) {
          callback();
        },
      });

      const echoProcess = sut.createSpawnDuplexStream('cat');
      await pipeline(Readable.from(data()), echoProcess, sink);
      expect(output).toBe('Hello, world!');
    });

    it('should drain stdin on process exit', async () => {
      let resolve1: () => void;
      let resolve2: () => void;
      const promise1 = new Promise<void>((r) => (resolve1 = r));
      const promise2 = new Promise<void>((r) => (resolve2 = r));

      async function* data() {
        yield 'Hello, world!';
        await promise1;
        await promise2;
        yield 'Write after stdin close / process exit!';
      }

      const process = sut.createSpawnDuplexStream('bash', ['-c', 'exit 0']);

      const realProcess = (process as never as { _process: ChildProcessWithoutNullStreams })._process;
      realProcess.on('close', () => setImmediate(() => resolve1()));
      realProcess.stdin.on('close', () => setImmediate(() => resolve2()));

      await pipeline(Readable.from(data()), process);
    });
  });
});
@@ -1,9 +1,109 @@
import { Injectable } from '@nestjs/common';
import { ChildProcessWithoutNullStreams, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
import { Duplex } from 'node:stream';

@Injectable()
export class ProcessRepository {
  spawn(command: string, args: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
  spawn(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
    return spawn(command, args, options);
  }

  createSpawnDuplexStream(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): Duplex {
    let stdinClosed = false;
    let drainCallback: undefined | (() => void);

    const process = this.spawn(command, args, options);
    const duplex = new Duplex({
      // duplex -> stdin
      write(chunk, encoding, callback) {
        // drain the input if process dies
        if (stdinClosed) {
          return callback();
        }

        // handle stream backpressure
        if (process.stdin.write(chunk, encoding)) {
          callback();
        } else {
          drainCallback = callback;
          process.stdin.once('drain', () => {
            drainCallback = undefined;
            callback();
          });
        }
      },

      read() {
        // no-op
      },

      final(callback) {
        if (stdinClosed) {
          callback();
        } else {
          process.stdin.end(callback);
        }
      },
    });

    // stdout -> duplex
    process.stdout.on('data', (chunk) => {
      // handle stream backpressure
      if (!duplex.push(chunk)) {
        process.stdout.pause();
      }
    });

    duplex.on('resume', () => process.stdout.resume());

    // end handling
    let stdoutClosed = false;
    function close(error?: Error) {
      stdinClosed = true;

      if (error) {
        duplex.destroy(error);
      } else if (stdoutClosed && typeof process.exitCode === 'number') {
        duplex.push(null);
      }
    }

    process.stdout.on('close', () => {
      stdoutClosed = true;
      close();
    });

    // error handling
    process.on('error', close);
    process.stdout.on('error', close);
    process.stdin.on('error', (error) => {
      if ((error as { code?: 'EPIPE' })?.code === 'EPIPE') {
        try {
          drainCallback!();
        } catch (error) {
          close(error as Error);
        }
      } else {
        close(error);
      }
    });

    let stderr = '';
    process.stderr.on('data', (chunk) => (stderr += chunk));

    process.on('exit', (code) => {
      console.info(`${command} exited (${code})`);

      if (code === 0) {
        close();
      } else {
        close(new Error(`${command} non-zero exit code (${code})\n${stderr}`));
      }
    });

    // attach _process to Duplex for testing suite
    (duplex as never as { _process: ChildProcessWithoutNullStreams })._process = process;

    return duplex;
  }
}

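The new spec file shows how this duplex wrapper is meant to be consumed: with node:stream pipelines. A minimal sketch of the backup-style composition (the pg_dump arguments and paths are illustrative, not the exact flags the backup code passes):

import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { ProcessRepository } from 'src/repositories/process.repository';

const processRepository = new ProcessRepository();

// pg_dump stdout -> gzip stdin -> file; a non-zero exit of either process
// rejects the pipeline with '<command> non-zero exit code (N)\n<stderr>'.
const dump = processRepository.createSpawnDuplexStream('pg_dump', ['--dbname', 'postgresql://...']);
const gzip = processRepository.createSpawnDuplexStream('gzip');
await pipeline(dump, gzip, createWriteStream('/data/backups/immich-db-backup.sql.gz.tmp'));
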
@@ -5,7 +5,8 @@ import { escapePath, glob, globStream } from 'fast-glob';
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync, ReadOptionsWithBuffer } from 'node:fs';
import fs from 'node:fs/promises';
import path from 'node:path';
import { Readable, Writable } from 'node:stream';
import { PassThrough, Readable, Writable } from 'node:stream';
import { createGunzip, createGzip } from 'node:zlib';
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { mimeTypes } from 'src/utils/mime-types';
@@ -93,6 +94,18 @@ export class StorageRepository {
    return { stream: archive, addFile, finalize };
  }

  createGzip(): PassThrough {
    return createGzip();
  }

  createGunzip(): PassThrough {
    return createGunzip();
  }

  createPlainReadStream(filepath: string): Readable {
    return createReadStream(filepath);
  }

  async createReadStream(filepath: string, mimeType?: string | null): Promise<ImmichReadStream> {
    const { size } = await fs.stat(filepath);
    await fs.access(filepath, constants.R_OK);

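These one-line wrappers exist so each stage of the backup/restore pipelines can be swapped for a PassThrough or mock duplex in tests. A plausible restore composition under that assumption (the repository instances and filename are illustrative):

import { pipeline } from 'node:stream/promises';

// backup.sql.gz -> gunzip -> psql stdin; every stage comes from a repository
// method, which is what lets the specs above stub them individually.
await pipeline(
  storageRepository.createPlainReadStream('/data/backups/backup.sql.gz'),
  storageRepository.createGunzip(),
  processRepository.createSpawnDuplexStream('psql', ['--dbname', 'postgresql://...']),
);
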
@@ -0,0 +1,24 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`INSERT INTO "asset_file" ("assetId", "path", "type")
    SELECT
      id, "sidecarPath", 'sidecar'
    FROM "asset"
    WHERE "sidecarPath" IS NOT NULL AND "sidecarPath" != '';`.execute(db);

  await sql`ALTER TABLE "asset" DROP COLUMN "sidecarPath";`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset" ADD "sidecarPath" character varying;`.execute(db);

  await sql`
    UPDATE "asset"
    SET "sidecarPath" = "asset_file"."path"
    FROM "asset_file"
    WHERE "asset"."id" = "asset_file"."assetId" AND "asset_file"."type" = 'sidecar';
  `.execute(db);

  await sql`DELETE FROM "asset_file" WHERE "type" = 'sidecar';`.execute(db);
}

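After this migration, sidecar paths live in asset_file rows of type 'sidecar' rather than on the asset itself. A minimal Kysely sketch of the new lookup (db and assetId are assumed to be in scope; the repositories actually use the withFiles helper for this):

const sidecar = await db
  .selectFrom('asset_file')
  .select(['asset_file.path'])
  .where('asset_file.assetId', '=', assetId)
  .where('asset_file.type', '=', 'sidecar')
  .executeTakeFirst();
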
@@ -0,0 +1,27 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`DROP INDEX "workflow_filter_filterId_idx";`.execute(db);
  await sql`DROP INDEX "workflow_action_actionId_idx";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" DROP CONSTRAINT "workflow_filter_filterId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_action" DROP CONSTRAINT "workflow_action_actionId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" RENAME COLUMN "filterId" TO "pluginFilterId";`.execute(db);
  await sql`ALTER TABLE "workflow_action" RENAME COLUMN "actionId" TO "pluginActionId";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" ADD CONSTRAINT "workflow_filter_pluginFilterId_fkey" FOREIGN KEY ("pluginFilterId") REFERENCES "plugin_filter" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`ALTER TABLE "workflow_action" ADD CONSTRAINT "workflow_action_pluginActionId_fkey" FOREIGN KEY ("pluginActionId") REFERENCES "plugin_action" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`CREATE INDEX "workflow_filter_pluginFilterId_idx" ON "workflow_filter" ("pluginFilterId");`.execute(db);
  await sql`CREATE INDEX "workflow_action_pluginActionId_idx" ON "workflow_action" ("pluginActionId");`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP INDEX "workflow_filter_pluginFilterId_idx";`.execute(db);
  await sql`DROP INDEX "workflow_action_pluginActionId_idx";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" DROP CONSTRAINT "workflow_filter_pluginFilterId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_action" DROP CONSTRAINT "workflow_action_pluginActionId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" RENAME COLUMN "pluginFilterId" TO "filterId";`.execute(db);
  await sql`ALTER TABLE "workflow_action" RENAME COLUMN "pluginActionId" TO "actionId";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" ADD CONSTRAINT "workflow_filter_filterId_fkey" FOREIGN KEY ("filterId") REFERENCES "plugin_filter" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`ALTER TABLE "workflow_action" ADD CONSTRAINT "workflow_action_actionId_fkey" FOREIGN KEY ("actionId") REFERENCES "plugin_action" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`CREATE INDEX "workflow_filter_filterId_idx" ON "workflow_filter" ("filterId");`.execute(db);
  await sql`CREATE INDEX "workflow_action_actionId_idx" ON "workflow_action" ("actionId");`.execute(db);
}

@@ -105,9 +105,6 @@ export class AssetTable {
  @Column({ index: true })
  originalFileName!: string;

  @Column({ nullable: true })
  sidecarPath!: string | null;

  @Column({ type: 'bytea', nullable: true })
  thumbhash!: Buffer | null;

@@ -38,7 +38,7 @@ export class WorkflowTable {
}

@Index({ columns: ['workflowId', 'order'] })
@Index({ columns: ['filterId'] })
@Index({ columns: ['pluginFilterId'] })
@Table('workflow_filter')
export class WorkflowFilterTable {
  @PrimaryGeneratedColumn('uuid')
@@ -48,7 +48,7 @@ export class WorkflowFilterTable {
  workflowId!: Generated<string>;

  @ForeignKeyColumn(() => PluginFilterTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
  filterId!: string;
  pluginFilterId!: string;

  @Column({ type: 'jsonb', nullable: true })
  filterConfig!: FilterConfig | null;
@@ -58,7 +58,7 @@ export class WorkflowFilterTable {
}

@Index({ columns: ['workflowId', 'order'] })
@Index({ columns: ['actionId'] })
@Index({ columns: ['pluginActionId'] })
@Table('workflow_action')
export class WorkflowActionTable {
  @PrimaryGeneratedColumn('uuid')
@@ -68,7 +68,7 @@ export class WorkflowActionTable {
  workflowId!: Generated<string>;

  @ForeignKeyColumn(() => PluginActionTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
  actionId!: string;
  pluginActionId!: string;

  @Column({ type: 'jsonb', nullable: true })
  actionConfig!: ActionConfig | null;

@@ -174,7 +174,6 @@ const assetEntity = Object.freeze({
    longitude: 10.703_075,
  },
  livePhotoVideoId: null,
  sidecarPath: null,
} as MapAsset);

const existingAsset = Object.freeze({
@@ -188,7 +187,6 @@ const existingAsset = Object.freeze({

const sidecarAsset = Object.freeze({
  ...existingAsset,
  sidecarPath: 'sidecar-path',
  checksum: Buffer.from('_getExistingAssetWithSideCar', 'utf8'),
}) as MapAsset;

@@ -721,18 +719,22 @@ describe(AssetMediaService.name, () => {
      expect(mocks.asset.update).toHaveBeenCalledWith(
        expect.objectContaining({
          id: existingAsset.id,
          sidecarPath: null,
          originalFileName: 'photo1.jpeg',
          originalPath: 'fake_path/photo1.jpeg',
        }),
      );
      expect(mocks.asset.create).toHaveBeenCalledWith(
        expect.objectContaining({
          sidecarPath: null,
          originalFileName: 'existing-filename.jpeg',
          originalPath: 'fake_path/asset_1.jpeg',
        }),
      );
      expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
        expect.objectContaining({
          assetId: existingAsset.id,
          type: AssetFileType.Sidecar,
        }),
      );

      expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], {
        deletedAt: expect.any(Date),
@@ -769,6 +771,13 @@ describe(AssetMediaService.name, () => {
        deletedAt: expect.any(Date),
        status: AssetStatus.Trashed,
      });
      expect(mocks.asset.upsertFile).toHaveBeenCalledWith(
        expect.objectContaining({
          assetId: existingAsset.id,
          path: sidecarFile.originalPath,
          type: AssetFileType.Sidecar,
        }),
      );
      expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
      expect(mocks.storage.utimes).toHaveBeenCalledWith(
        updatedFile.originalPath,
@@ -798,6 +807,12 @@ describe(AssetMediaService.name, () => {
        deletedAt: expect.any(Date),
        status: AssetStatus.Trashed,
      });
      expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
        expect.objectContaining({
          assetId: existingAsset.id,
          type: AssetFileType.Sidecar,
        }),
      );
      expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
      expect(mocks.storage.utimes).toHaveBeenCalledWith(
        updatedFile.originalPath,
@@ -827,6 +842,9 @@ describe(AssetMediaService.name, () => {

      expect(mocks.asset.create).not.toHaveBeenCalled();
      expect(mocks.asset.updateAll).not.toHaveBeenCalled();
      expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
      expect(mocks.asset.deleteFile).not.toHaveBeenCalled();

      expect(mocks.job.queue).toHaveBeenCalledWith({
        name: JobName.FileDelete,
        data: { files: [updatedFile.originalPath, undefined] },

@@ -21,7 +21,16 @@ import {
  UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetStatus, AssetType, AssetVisibility, CacheControl, JobName, Permission, StorageFolder } from 'src/enum';
import {
  AssetFileType,
  AssetStatus,
  AssetType,
  AssetVisibility,
  CacheControl,
  JobName,
  Permission,
  StorageFolder,
} from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { BaseService } from 'src/services/base.service';
import { UploadFile, UploadRequest } from 'src/types';
@@ -354,9 +363,12 @@ export class AssetMediaService extends BaseService {
      duration: dto.duration || null,

      livePhotoVideoId: null,
      sidecarPath: sidecarPath || null,
    });

    await (sidecarPath
      ? this.assetRepository.upsertFile({ assetId, type: AssetFileType.Sidecar, path: sidecarPath })
      : this.assetRepository.deleteFile({ assetId, type: AssetFileType.Sidecar }));

    await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
    await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size });
    await this.jobRepository.queue({
@@ -384,7 +396,6 @@ export class AssetMediaService extends BaseService {
      localDateTime: asset.localDateTime,
      fileModifiedAt: asset.fileModifiedAt,
      livePhotoVideoId: asset.livePhotoVideoId,
      sidecarPath: asset.sidecarPath,
    });

    const { size } = await this.storageRepository.stat(created.originalPath);
@@ -414,7 +425,6 @@ export class AssetMediaService extends BaseService {
      visibility: dto.visibility ?? AssetVisibility.Timeline,
      livePhotoVideoId: dto.livePhotoVideoId,
      originalFileName: dto.filename || file.originalName,
      sidecarPath: sidecarFile?.originalPath,
    });

    if (dto.metadata) {
@@ -422,6 +432,11 @@ export class AssetMediaService extends BaseService {
    }

    if (sidecarFile) {
      await this.assetRepository.upsertFile({
        assetId: asset.id,
        path: sidecarFile.originalPath,
        type: AssetFileType.Sidecar,
      });
      await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
    }
    await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));

@@ -585,8 +585,8 @@ describe(AssetService.name, () => {
          '/uploads/user-id/webp/path.ext',
          '/uploads/user-id/thumbs/path.jpg',
          '/uploads/user-id/fullsize/path.webp',
          assetWithFace.encodedVideoPath,
          assetWithFace.sidecarPath,
          assetWithFace.encodedVideoPath, // this value is null
          undefined, // no sidecar path
          assetWithFace.originalPath,
        ],
      },

@@ -2,6 +2,7 @@ import { BadRequestException, Injectable } from '@nestjs/common';
import _ from 'lodash';
import { DateTime, Duration } from 'luxon';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { AssetFile } from 'src/database';
import { OnJob } from 'src/decorators';
import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import {
@@ -18,7 +19,16 @@ import {
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { AssetMetadataKey, AssetStatus, AssetVisibility, JobName, JobStatus, Permission, QueueName } from 'src/enum';
import {
  AssetFileType,
  AssetMetadataKey,
  AssetStatus,
  AssetVisibility,
  JobName,
  JobStatus,
  Permission,
  QueueName,
} from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
import { requireElevatedPermission } from 'src/utils/access';
@@ -197,8 +207,8 @@ export class AssetService extends BaseService {
    }: AssetCopyDto,
  ) {
    await this.requireAccess({ auth, permission: Permission.AssetCopy, ids: [sourceId, targetId] });
    const sourceAsset = await this.assetRepository.getById(sourceId);
    const targetAsset = await this.assetRepository.getById(targetId);
    const sourceAsset = await this.assetRepository.getForCopy(sourceId);
    const targetAsset = await this.assetRepository.getForCopy(targetId);

    if (!sourceAsset || !targetAsset) {
      throw new BadRequestException('Both assets must exist');
@@ -252,19 +262,25 @@ export class AssetService extends BaseService {
    sourceAsset,
    targetAsset,
  }: {
    sourceAsset: { sidecarPath: string | null };
    targetAsset: { id: string; sidecarPath: string | null; originalPath: string };
    sourceAsset: { files: AssetFile[] };
    targetAsset: { id: string; files: AssetFile[]; originalPath: string };
  }) {
    if (!sourceAsset.sidecarPath) {
    const { sidecarFile: sourceFile } = getAssetFiles(sourceAsset.files);
    if (!sourceFile?.path) {
      return;
    }

    if (targetAsset.sidecarPath) {
      await this.storageRepository.unlink(targetAsset.sidecarPath);
    const { sidecarFile: targetFile } = getAssetFiles(targetAsset.files ?? []);
    if (targetFile?.path) {
      await this.storageRepository.unlink(targetFile.path);
    }

    await this.storageRepository.copyFile(sourceAsset.sidecarPath, `${targetAsset.originalPath}.xmp`);
    await this.assetRepository.update({ id: targetAsset.id, sidecarPath: `${targetAsset.originalPath}.xmp` });
    await this.storageRepository.copyFile(sourceFile.path, `${targetAsset.originalPath}.xmp`);
    await this.assetRepository.upsertFile({
      assetId: targetAsset.id,
      path: `${targetAsset.originalPath}.xmp`,
      type: AssetFileType.Sidecar,
    });
    await this.jobRepository.queue({ name: JobName.AssetExtractMetadata, data: { id: targetAsset.id } });
  }

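The copy path above resolves sidecars through the files relation with getAssetFiles. The diff does not show that helper's body; a sketch of the shape implied by its call sites (the non-sidecar enum members here are assumptions):

import { AssetFile } from 'src/database';
import { AssetFileType } from 'src/enum';

function getAssetFiles(files: AssetFile[]) {
  const byType = (type: AssetFileType) => files.find((file) => file.type === type);
  return {
    fullsizeFile: byType(AssetFileType.FullSize),
    previewFile: byType(AssetFileType.Preview),
    thumbnailFile: byType(AssetFileType.Thumbnail),
    sidecarFile: byType(AssetFileType.Sidecar),
  };
}
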
@@ -344,11 +360,11 @@ export class AssetService extends BaseService {
|
||||
}
|
||||
}
|
||||
|
||||
const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(asset.files ?? []);
|
||||
const { fullsizeFile, previewFile, thumbnailFile, sidecarFile } = getAssetFiles(asset.files ?? []);
|
||||
const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];
|
||||
|
||||
if (deleteOnDisk) {
|
||||
files.push(asset.sidecarPath, asset.originalPath);
|
||||
files.push(sidecarFile?.path, asset.originalPath);
|
||||
}
|
||||
|
||||
await this.jobRepository.queue({ name: JobName.FileDelete, data: { files } });
|
||||
|
||||

@@ -5,7 +5,7 @@ import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
import { BackupService } from 'src/services/backup.service';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { mockDuplex, mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { describe } from 'vitest';

describe(BackupService.name, () => {

@@ -147,6 +147,7 @@ describe(BackupService.name, () => {
beforeEach(() => {
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);

@@ -165,7 +166,7 @@ describe(BackupService.name, () => {
({ sut, mocks } = newTestService(BackupService, { config: configMock }));

mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);

@@ -174,14 +175,16 @@ describe(BackupService.name, () => {

await sut.handleBackupDatabase();

expect(mocks.process.spawn).toHaveBeenCalled();
const call = mocks.process.spawn.mock.calls[0];
expect(mocks.process.createSpawnDuplexStream).toHaveBeenCalled();
const call = mocks.process.createSpawnDuplexStream.mock.calls[0];
const args = call[1] as string[];
// ['--dbname', '<url>', '--clean', '--if-exists']
expect(args[0]).toBe('--dbname');
const passedUrl = args[1];
expect(passedUrl).not.toContain('uselibpqcompat');
expect(passedUrl).toContain('sslmode=require');
expect(args).toMatchInlineSnapshot(`
[
"postgresql://postgres:pwd@host:5432/immich?sslmode=require",
"--clean",
"--if-exists",
]
`);
});

it('should run a database backup successfully', async () => {

@@ -196,21 +199,21 @@ describe(BackupService.name, () => {
expect(mocks.storage.rename).toHaveBeenCalled();
});

it('should fail if pg_dumpall fails', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
it('should fail if pg_dump fails', async () => {
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
});

it('should not rename file if pgdump fails and gzip succeeds', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
expect(mocks.storage.rename).not.toHaveBeenCalled();
});

it('should fail if gzip fails', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Gzip failed with code 1');
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
});

it('should fail if write stream fails', async () => {

@@ -226,9 +229,9 @@ describe(BackupService.name, () => {
});

it('should ignore unlink failing and still return failed job status', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
mocks.storage.unlink.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
expect(mocks.storage.unlink).toHaveBeenCalled();
});

@@ -242,12 +245,12 @@ describe(BackupService.name, () => {
${'17.15.1'} | ${17}
${'18.0.0'} | ${18}
`(
`should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
`should use pg_dump $expectedVersion with postgres version $postgresVersion`,
async ({ postgresVersion, expectedVersion }) => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
await sut.handleBackupDatabase();
expect(mocks.process.spawn).toHaveBeenCalledWith(
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dumpall`,
expect(mocks.process.createSpawnDuplexStream).toHaveBeenCalledWith(
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
expect.any(Array),
expect.any(Object),
);
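The tests above swap mockSpawn for a mockDuplex helper from test/utils whose implementation is not part of this diff. A plausible reconstruction, consistent with how the tests use it (a duplex stream standing in for createSpawnDuplexStream's result, failing with the same non-zero exit message the assertions expect):

import { PassThrough } from 'node:stream';

// Hypothetical sketch of mockDuplex (the real helper lives in test/utils and may differ).
export const mockDuplex = (command: string, exitCode: number, stdout: string, stderr: string) => {
  const stream = new PassThrough();
  setImmediate(() => {
    if (exitCode === 0) {
      stream.end(stdout); // emit the canned output, then finish cleanly
    } else {
      // stderr is unused in this sketch; the message mirrors the rejections asserted above
      stream.destroy(new Error(`${command} non-zero exit code (${exitCode})`));
    }
  });
  return stream;
};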

@@ -1,13 +1,16 @@
import { Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import path from 'node:path';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import {
createDatabaseBackup,
isFailedDatabaseBackupName,
isValidDatabaseRoutineBackupName,
UnsupportedPostgresError,
} from 'src/utils/database-backups';
import { handlePromiseError } from 'src/utils/misc';

@Injectable()

@@ -53,16 +56,11 @@ export class BackupService extends BaseService {

const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await this.storageRepository.readdir(backupsFolder);
const failedBackups = files.filter((file) => file.match(/immich-db-backup-.*\.sql\.gz\.tmp$/));
const backups = files
.filter((file) => {
const oldBackupStyle = file.match(/immich-db-backup-\d+\.sql\.gz$/);
//immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
const newBackupStyle = file.match(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
return oldBackupStyle || newBackupStyle;
})
.filter((fn) => isValidDatabaseRoutineBackupName(fn))
.toSorted()
.toReversed();
const failedBackups = files.filter((fn) => isFailedDatabaseBackupName(fn));

const toDelete = backups.slice(config.keepLastAmount);
toDelete.push(...failedBackups);

@@ -75,123 +73,27 @@ export class BackupService extends BaseService {

@OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
async handleBackupDatabase(): Promise<JobStatus> {
this.logger.debug(`Database Backup Started`);
const { database } = this.configRepository.getEnv();
const config = database.config;

const isUrlConnection = config.connectionType === 'url';

let connectionUrl: string = isUrlConnection ? config.url : '';
if (URL.canParse(connectionUrl)) {
// remove known bad url parameters for pg_dumpall
const url = new URL(connectionUrl);
url.searchParams.delete('uselibpqcompat');
connectionUrl = url.toString();
}

const databaseParams = isUrlConnection
? ['--dbname', connectionUrl]
: [
'--username',
config.username,
'--host',
config.host,
'--port',
`${config.port}`,
'--database',
config.database,
];

databaseParams.push('--clean', '--if-exists');
const databaseVersion = await this.databaseRepository.getPostgresVersion();
const backupFilePath = path.join(
StorageCore.getBaseFolder(StorageFolder.Backups),
`immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
);
const databaseSemver = semver.coerce(databaseVersion);
const databaseMajorVersion = databaseSemver?.major;

if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
return JobStatus.Failed;
}

this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);

try {
await new Promise<void>((resolve, reject) => {
const pgdump = this.processRepository.spawn(
`/usr/lib/postgresql/${databaseMajorVersion}/bin/pg_dumpall`,
databaseParams,
{
env: {
PATH: process.env.PATH,
PGPASSWORD: isUrlConnection ? new URL(connectionUrl).password : config.password,
},
},
);

// NOTE: `--rsyncable` is only supported in GNU gzip
const gzip = this.processRepository.spawn(`gzip`, ['--rsyncable']);
pgdump.stdout.pipe(gzip.stdin);

const fileStream = this.storageRepository.createWriteStream(backupFilePath);

gzip.stdout.pipe(fileStream);

pgdump.on('error', (err) => {
this.logger.error(`Backup failed with error: ${err}`);
reject(err);
});

gzip.on('error', (err) => {
this.logger.error(`Gzip failed with error: ${err}`);
reject(err);
});

let pgdumpLogs = '';
let gzipLogs = '';

pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
gzip.stderr.on('data', (data) => (gzipLogs += data));

pgdump.on('exit', (code) => {
if (code !== 0) {
this.logger.error(`Backup failed with code ${code}`);
reject(`Backup failed with code ${code}`);
this.logger.error(pgdumpLogs);
return;
}
if (pgdumpLogs) {
this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
}
});

gzip.on('exit', (code) => {
if (code !== 0) {
this.logger.error(`Gzip failed with code ${code}`);
reject(`Gzip failed with code ${code}`);
this.logger.error(gzipLogs);
return;
}
if (pgdump.exitCode !== 0) {
this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
return;
}
resolve();
});
});
await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
await createDatabaseBackup(this.backupRepos);
} catch (error) {
this.logger.error(`Database Backup Failure: ${error}`);
await this.storageRepository
.unlink(backupFilePath)
.catch((error) => this.logger.error(`Failed to delete failed backup file: ${error}`));
if (error instanceof UnsupportedPostgresError) {
return JobStatus.Failed;
}

throw error;
}

this.logger.log(`Database Backup Success`);
await this.cleanupDatabaseBackups();
return JobStatus.Success;
}

private get backupRepos() {
return {
logger: this.logger,
storage: this.storageRepository,
config: this.configRepository,
process: this.processRepository,
database: this.databaseRepository,
};
}
}
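The rewrite above replaces the manually wired spawn/pipe/exit-event bookkeeping with duplex process streams and a single pipeline() call, which propagates a failure in any stage and tears down the whole chain. A minimal standalone sketch of the same pattern using only Node built-ins (binary path and flags illustrative; unlike createSpawnDuplexStream, raw stdio piping does not surface non-zero exit codes by itself):

import { spawn } from 'node:child_process';
import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';

// Sketch: pg_dump | gzip > backup.sql.gz, awaited as two concurrent pipelines.
async function dumpToFile(databaseUrl: string, outputPath: string): Promise<void> {
  const pgdump = spawn('pg_dump', [databaseUrl, '--clean', '--if-exists']);
  const gzip = spawn('gzip', ['--rsyncable']); // --rsyncable is GNU gzip only
  await Promise.all([
    pipeline(pgdump.stdout, gzip.stdin), // dump -> compressor
    pipeline(gzip.stdout, createWriteStream(outputPath)), // compressor -> disk
  ]);
}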

@@ -1,5 +1,5 @@
import { jwtVerify } from 'jose';
import { SystemMetadataKey } from 'src/enum';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { CliService } from 'src/services/cli.service';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';

@@ -94,7 +94,14 @@ describe(CliService.name, () => {
});

it('should disable maintenance mode', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

await expect(sut.disableMaintenanceMode()).resolves.toEqual({
alreadyDisabled: false,
});

@@ -107,7 +114,14 @@ describe(CliService.name, () => {

describe('enableMaintenanceMode', () => {
it('should not do anything if in maintenance mode', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

await expect(sut.enableMaintenanceMode()).resolves.toEqual(
expect.objectContaining({
alreadyEnabled: true,

@@ -129,13 +143,22 @@ describe(CliService.name, () => {
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret: expect.stringMatching(/^\w{128}$/),
action: {
action: 'start',
},
});
});

const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;

it('should return a valid login URL', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

const result = await sut.enableMaintenanceMode();

@@ -3,7 +3,7 @@ import { isAbsolute } from 'node:path';
import { SALT_ROUNDS } from 'src/constants';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { UserAdminResponseDto, mapUserAdmin } from 'src/dtos/user.dto';
import { SystemMetadataKey } from 'src/enum';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { createMaintenanceLoginUrl, generateMaintenanceSecret, sendOneShotAppRestart } from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';

@@ -87,6 +87,9 @@ export class CliService extends BaseService {
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret,
action: {
action: MaintenanceAction.Start,
},
});

sendOneShotAppRestart({

server/src/services/database-backup.service.spec.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
import { BadRequestException } from '@nestjs/common';
import { DateTime } from 'luxon';
import { StorageCore } from 'src/cores/storage.core';
import { StorageFolder } from 'src/enum';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { MaintenanceService } from 'src/services/maintenance.service';
import { newTestService, ServiceMocks } from 'test/utils';

describe(MaintenanceService.name, () => {
let sut: DatabaseBackupService;
let mocks: ServiceMocks;

beforeEach(() => {
({ sut, mocks } = newTestService(DatabaseBackupService));
});

it('should work', () => {
expect(sut).toBeDefined();
});

describe('listBackups', () => {
it('should give us all backups', async () => {
mocks.storage.readdir.mockResolvedValue([
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
'immich-db-backup-1753789649000.sql.gz',
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
]);

await expect(sut.listBackups()).resolves.toMatchObject({
backups: [
'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz',
'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz',
'immich-db-backup-1753789649000.sql.gz',
],
});
});
});

describe('deleteBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});

it('should unlink the target file', async () => {
await sut.deleteBackup(['filename.sql']);
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
);
});
});

describe('uploadBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});

it('should write file', async () => {
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
});
});

describe('downloadBackup', () => {
it('should reject invalid file names', () => {
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
});

it('should get backup path', () => {
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
expect.objectContaining({
path: '/data/backups/hello.sql.gz',
}),
);
});
});
});

server/src/services/database-backup.service.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import { Injectable } from '@nestjs/common';
import { BaseService } from 'src/services/base.service';
import {
deleteDatabaseBackup,
downloadDatabaseBackup,
listDatabaseBackups,
uploadDatabaseBackup,
} from 'src/utils/database-backups';
import { ImmichFileResponse } from 'src/utils/file';

/**
 * This service is available outside of maintenance mode to manage maintenance mode
 */
@Injectable()
export class DatabaseBackupService extends BaseService {
async listBackups(): Promise<{ backups: string[] }> {
return { backups: await listDatabaseBackups(this.backupRepos) };
}

deleteBackup(files: string[]): Promise<void> {
return deleteDatabaseBackup(this.backupRepos, files);
}

async uploadBackup(file: Express.Multer.File): Promise<void> {
return uploadDatabaseBackup(this.backupRepos, file);
}

downloadBackup(fileName: string): ImmichFileResponse {
return downloadDatabaseBackup(fileName);
}

private get backupRepos() {
return {
logger: this.logger,
storage: this.storageRepository,
config: this.configRepository,
process: this.processRepository,
database: this.databaseRepository,
};
}
}

@@ -9,6 +9,7 @@ import { AuthAdminService } from 'src/services/auth-admin.service';
import { AuthService } from 'src/services/auth.service';
import { BackupService } from 'src/services/backup.service';
import { CliService } from 'src/services/cli.service';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { DatabaseService } from 'src/services/database.service';
import { DownloadService } from 'src/services/download.service';
import { DuplicateService } from 'src/services/duplicate.service';

@@ -59,6 +60,7 @@ export const services = [
AuthAdminService,
BackupService,
CliService,
DatabaseBackupService,
DatabaseService,
DownloadService,
DuplicateService,

@@ -1,4 +1,4 @@
import { SystemMetadataKey } from 'src/enum';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { MaintenanceService } from 'src/services/maintenance.service';
import { newTestService, ServiceMocks } from 'test/utils';

@@ -36,28 +36,96 @@ describe(MaintenanceService.name, () => {
});

it('should return true if enabled', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: '' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: '',
action: { action: MaintenanceAction.Start },
});

await expect(sut.getMaintenanceMode()).resolves.toEqual({
isMaintenanceMode: true,
secret: '',
action: {
action: 'start',
},
});

expect(mocks.systemMetadata.get).toHaveBeenCalled();
});
});

describe('integrityCheck', () => {
it('generate integrity report', async () => {
mocks.storage.readdir.mockResolvedValue(['.immich', 'file1', 'file2']);
mocks.storage.readFile.mockResolvedValue(undefined as never);
mocks.storage.overwriteFile.mockRejectedValue(undefined as never);

await expect(sut.detectPriorInstall()).resolves.toMatchInlineSnapshot(`
{
"storage": [
{
"files": 2,
"folder": "encoded-video",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "library",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "upload",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "profile",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "thumbs",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "backups",
"readable": true,
"writable": false,
},
],
}
`);
});
});

describe('startMaintenance', () => {
it('should set maintenance mode and return a secret', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: false });

await expect(sut.startMaintenance('admin')).resolves.toMatchObject({
await expect(
sut.startMaintenance(
{
action: MaintenanceAction.Start,
},
'admin',
),
).resolves.toMatchObject({
jwt: expect.any(String),
});

expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret: expect.stringMatching(/^\w{128}$/),
action: {
action: 'start',
},
});

expect(mocks.event.emit).toHaveBeenCalledWith('AppRestart', {

@@ -78,7 +146,13 @@ describe(MaintenanceService.name, () => {
});

it('should generate a login url with JWT', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

await expect(
sut.createLoginUrl({

@@ -1,10 +1,20 @@
import { Injectable } from '@nestjs/common';
import { BadRequestException, Injectable } from '@nestjs/common';
import { OnEvent } from 'src/decorators';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { SystemMetadataKey } from 'src/enum';
import {
MaintenanceAuthDto,
MaintenanceDetectInstallResponseDto,
MaintenanceStatusResponseDto,
SetMaintenanceModeDto,
} from 'src/dtos/maintenance.dto';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { MaintenanceModeState } from 'src/types';
import { createMaintenanceLoginUrl, generateMaintenanceSecret, signMaintenanceJwt } from 'src/utils/maintenance';
import {
createMaintenanceLoginUrl,
detectPriorInstall,
generateMaintenanceSecret,
signMaintenanceJwt,
} from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';

/**

@@ -18,9 +28,25 @@ export class MaintenanceService extends BaseService {
.then((state) => state ?? { isMaintenanceMode: false });
}

async startMaintenance(username: string): Promise<{ jwt: string }> {
getMaintenanceStatus(): MaintenanceStatusResponseDto {
return {
active: false,
action: MaintenanceAction.End,
};
}

detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
return detectPriorInstall(this.storageRepository);
}

async startMaintenance(action: SetMaintenanceModeDto, username: string): Promise<{ jwt: string }> {
const secret = generateMaintenanceSecret();
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, { isMaintenanceMode: true, secret });
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret,
action,
});

await this.eventRepository.emit('AppRestart', { isMaintenanceMode: true });

return {

@@ -30,6 +56,20 @@ export class MaintenanceService extends BaseService {
};
}

async startRestoreFlow(): Promise<{ jwt: string }> {
const adminUser = await this.userRepository.getAdmin();
if (adminUser) {
throw new BadRequestException('The server already has an admin');
}

return this.startMaintenance(
{
action: MaintenanceAction.RestoreDatabase,
},
'admin',
);
}

@OnEvent({ name: 'AppRestart', server: true })
onRestart(): void {
this.appRepository.exitApp();
|
||||
import { Stats } from 'node:fs';
|
||||
import { defaults } from 'src/config';
|
||||
import { MapAsset } from 'src/dtos/asset-response.dto';
|
||||
import { AssetType, AssetVisibility, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
|
||||
import {
|
||||
AssetFileType,
|
||||
AssetType,
|
||||
AssetVisibility,
|
||||
ExifOrientation,
|
||||
ImmichWorker,
|
||||
JobName,
|
||||
JobStatus,
|
||||
SourceType,
|
||||
} from 'src/enum';
|
||||
import { ImmichTags } from 'src/repositories/metadata.repository';
|
||||
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
|
||||
import { assetStub } from 'test/fixtures/asset.stub';
|
||||
@@ -15,17 +24,24 @@ import { tagStub } from 'test/fixtures/tag.stub';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
|
||||
|
||||
const removeNonSidecarFiles = (asset: any) => {
|
||||
return {
|
||||
...asset,
|
||||
files: asset.files.filter((file: any) => file.type === AssetFileType.Sidecar),
|
||||
};
|
||||
};
|
||||
|
||||
const forSidecarJob = (
|
||||
asset: {
|
||||
id?: string;
|
||||
originalPath?: string;
|
||||
sidecarPath?: string | null;
|
||||
files?: { id: string; type: AssetFileType; path: string }[];
|
||||
} = {},
|
||||
) => {
|
||||
return {
|
||||
id: factory.uuid(),
|
||||
originalPath: '/path/to/IMG_123.jpg',
|
||||
sidecarPath: null,
|
||||
files: [],
|
||||
...asset,
|
||||
};
|
||||
};
|
||||
@@ -166,7 +182,7 @@ describe(MetadataService.name, () => {
|
||||
it('should handle a date in a sidecar file', async () => {
|
||||
const originalDate = new Date('2023-11-21T16:13:17.517Z');
|
||||
const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
|
||||
mockReadTags({ CreationDate: originalDate.toISOString() }, { CreationDate: sidecarDate.toISOString() });
|
||||
|
||||
await sut.handleMetadataExtraction({ id: assetStub.image.id });
|
||||
@@ -185,7 +201,7 @@ describe(MetadataService.name, () => {
|
||||
it('should take the file modification date when missing exif and earlier than creation date', async () => {
|
||||
const fileCreatedAt = new Date('2022-01-01T00:00:00.000Z');
|
||||
const fileModifiedAt = new Date('2021-01-01T00:00:00.000Z');
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
size: 123_456,
|
||||
mtime: fileModifiedAt,
|
||||
@@ -211,7 +227,7 @@ describe(MetadataService.name, () => {
|
||||
it('should take the file creation date when missing exif and earlier than modification date', async () => {
|
||||
const fileCreatedAt = new Date('2021-01-01T00:00:00.000Z');
|
||||
const fileModifiedAt = new Date('2022-01-01T00:00:00.000Z');
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
size: 123_456,
|
||||
mtime: fileModifiedAt,
|
||||
@@ -234,7 +250,7 @@ describe(MetadataService.name, () => {
|
||||
|
||||
it('should determine dateTimeOriginal regardless of the server time zone', async () => {
|
||||
process.env.TZ = 'America/Los_Angeles';
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
|
||||
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });
|
||||
|
||||
await sut.handleMetadataExtraction({ id: assetStub.image.id });
|
||||
@@ -252,7 +268,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should handle lists of numbers', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
size: 123_456,
|
||||
mtime: assetStub.image.fileModifiedAt,
|
||||
@@ -305,7 +321,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should apply reverse geocoding', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
|
||||
mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: true } });
|
||||
mocks.map.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
@@ -334,7 +350,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should discard latitude and longitude on null island', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
|
||||
mockReadTags({
|
||||
GPSLatitude: 0,
|
||||
GPSLongitude: 0,
|
||||
@@ -346,7 +362,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from TagsList', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ TagsList: ['Parent'] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -356,7 +372,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract hierarchy from TagsList', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ TagsList: ['Parent/Child'] });
|
||||
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
|
||||
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
|
||||
@@ -376,7 +392,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from Keywords as a string', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: 'Parent' });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -386,7 +402,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from Keywords as a list', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: ['Parent'] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -396,7 +412,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from Keywords as a list with a number', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: ['Parent', 2024] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -407,7 +423,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract hierarchal tags from Keywords', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: 'Parent/Child' });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -426,7 +442,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should ignore Keywords when TagsList is present', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -445,7 +461,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract hierarchy from HierarchicalSubject', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
|
||||
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
|
||||
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
|
||||
@@ -466,7 +482,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from HierarchicalSubject as a list with a number', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -1030,8 +1046,15 @@ describe(MetadataService.name, () => {
|
||||
it('should prefer Duration from exif over sidecar', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
|
||||
...assetStub.image,
|
||||
sidecarPath: '/path/to/something',
|
||||
files: [
|
||||
{
|
||||
id: 'some-id',
|
||||
type: AssetFileType.Sidecar,
|
||||
path: '/path/to/something',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
mockReadTags({ Duration: 123 }, { Duration: 456 });
|
||||
|
||||
await sut.handleMetadataExtraction({ id: assetStub.image.id });
|
||||
@@ -1536,18 +1559,25 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should detect a new sidecar at .jpg.xmp', async () => {
|
||||
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
|
||||
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', files: [] });
|
||||
|
||||
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
|
||||
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
|
||||
|
||||
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: `/path/to/IMG_123.jpg.xmp` });
|
||||
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Sidecar,
|
||||
path: '/path/to/IMG_123.jpg.xmp',
|
||||
});
|
||||
});
|
||||
|
||||
it('should detect a new sidecar at .xmp', async () => {
|
||||
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
|
||||
const asset = forSidecarJob({
|
||||
originalPath: '/path/to/IMG_123.jpg',
|
||||
files: [],
|
||||
});
|
||||
|
||||
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
|
||||
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
|
||||
@@ -1555,33 +1585,44 @@ describe(MetadataService.name, () => {
|
||||
|
||||
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: '/path/to/IMG_123.xmp' });
|
||||
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Sidecar,
|
||||
path: '/path/to/IMG_123.xmp',
|
||||
});
|
||||
});
|
||||
|
||||
it('should unset sidecar path if file does not exist anymore', async () => {
|
||||
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg.xmp' });
|
||||
it('should unset sidecar path if file no longer exist', async () => {
|
||||
const asset = forSidecarJob({
|
||||
originalPath: '/path/to/IMG_123.jpg',
|
||||
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
|
||||
});
|
||||
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
|
||||
mocks.storage.checkFileExists.mockResolvedValue(false);
|
||||
|
||||
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: null });
|
||||
expect(mocks.asset.deleteFile).toHaveBeenCalledWith({ assetId: asset.id, type: AssetFileType.Sidecar });
|
||||
});
|
||||
|
||||
it('should do nothing if the sidecar file still exists', async () => {
|
||||
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg' });
|
||||
const asset = forSidecarJob({
|
||||
originalPath: '/path/to/IMG_123.jpg',
|
||||
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
|
||||
});
|
||||
|
||||
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
|
||||
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
|
||||
|
||||
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Skipped);
|
||||
|
||||
expect(mocks.asset.update).not.toHaveBeenCalled();
|
||||
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
|
||||
expect(mocks.asset.deleteFile).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleSidecarWrite', () => {
|
||||
it('should skip assets that do not exist anymore', async () => {
|
||||
it('should skip assets that no longer exist', async () => {
|
||||
mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(void 0);
|
||||
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.Failed);
|
||||
expect(mocks.metadata.writeTags).not.toHaveBeenCalled();
|
||||
@@ -1610,7 +1651,7 @@ describe(MetadataService.name, () => {
|
||||
dateTimeOriginal: date,
|
||||
}),
|
||||
).resolves.toBe(JobStatus.Success);
|
||||
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.sidecarPath, {
|
||||
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.files[0].path, {
|
||||
Description: description,
|
||||
ImageDescription: description,
|
||||
DateTimeOriginal: date,
|
||||
|
||||
@@ -8,9 +8,10 @@ import { constants } from 'node:fs/promises';
|
||||
import { join, parse } from 'node:path';
|
||||
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { Asset, AssetFace } from 'src/database';
|
||||
import { Asset, AssetFace, AssetFile } from 'src/database';
|
||||
import { OnEvent, OnJob } from 'src/decorators';
|
||||
import {
|
||||
AssetFileType,
|
||||
AssetType,
|
||||
AssetVisibility,
|
||||
DatabaseLock,
|
||||
@@ -29,6 +30,7 @@ import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
|
||||
import { PersonTable } from 'src/schema/tables/person.table';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import { JobItem, JobOf } from 'src/types';
|
||||
import { getAssetFiles } from 'src/utils/asset.util';
|
||||
import { isAssetChecksumConstraint } from 'src/utils/database';
|
||||
import { isFaceImportEnabled } from 'src/utils/misc';
|
||||
import { upsertTags } from 'src/utils/tag';
|
||||
@@ -359,17 +361,21 @@ export class MetadataService extends BaseService {
|
||||
break;
|
||||
}
|
||||
|
||||
const isChanged = sidecarPath !== asset.sidecarPath;
|
||||
const { sidecarFile } = getAssetFiles(asset.files);
|
||||
|
||||
const isChanged = sidecarPath !== sidecarFile?.path;
|
||||
|
||||
this.logger.debug(
|
||||
`Sidecar check found old=${asset.sidecarPath}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
|
||||
`Sidecar check found old=${sidecarFile?.path}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
|
||||
);
|
||||
|
||||
if (!isChanged) {
|
||||
return JobStatus.Skipped;
|
||||
}
|
||||
|
||||
await this.assetRepository.update({ id: asset.id, sidecarPath });
|
||||
await (sidecarPath === null
|
||||
? this.assetRepository.deleteFile({ assetId: asset.id, type: AssetFileType.Sidecar })
|
||||
: this.assetRepository.upsertFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: sidecarPath }));
|
||||
|
||||
return JobStatus.Success;
|
||||
}
|
||||
@@ -394,7 +400,9 @@ export class MetadataService extends BaseService {
|
||||
|
||||
const tagsList = (asset.tags || []).map((tag) => tag.value);
|
||||
|
||||
const sidecarPath = asset.sidecarPath || `${asset.originalPath}.xmp`;
|
||||
const { sidecarFile } = getAssetFiles(asset.files);
|
||||
const sidecarPath = sidecarFile?.path || `${asset.originalPath}.xmp`;
|
||||
|
||||
const exif = _.omitBy(
|
||||
<Tags>{
|
||||
Description: description,
|
||||
@@ -414,18 +422,19 @@ export class MetadataService extends BaseService {
|
||||
|
||||
await this.metadataRepository.writeTags(sidecarPath, exif);
|
||||
|
||||
if (!asset.sidecarPath) {
|
||||
await this.assetRepository.update({ id, sidecarPath });
|
||||
if (asset.files.length === 0) {
|
||||
await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: sidecarPath });
|
||||
}
|
||||
|
||||
return JobStatus.Success;
|
||||
}
|
||||
|
||||
private getSidecarCandidates({ sidecarPath, originalPath }: { sidecarPath: string | null; originalPath: string }) {
|
||||
private getSidecarCandidates({ files, originalPath }: { files: AssetFile[]; originalPath: string }) {
|
||||
const candidates: string[] = [];
|
||||
|
||||
if (sidecarPath) {
|
||||
candidates.push(sidecarPath);
|
||||
const { sidecarFile } = getAssetFiles(files);
|
||||
if (sidecarFile?.path) {
|
||||
candidates.push(sidecarFile.path);
|
||||
}
|
||||
|
||||
const assetPath = parse(originalPath);
|
||||
@@ -456,14 +465,12 @@ export class MetadataService extends BaseService {
|
||||
return { width, height };
|
||||
}
|
||||
|
||||
private async getExifTags(asset: {
|
||||
originalPath: string;
|
||||
sidecarPath: string | null;
|
||||
type: AssetType;
|
||||
}): Promise<ImmichTags> {
|
||||
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
|
||||
const { sidecarFile } = getAssetFiles(asset.files);
|
||||
|
||||
const [mediaTags, sidecarTags, videoTags] = await Promise.all([
|
||||
this.metadataRepository.readTags(asset.originalPath),
|
||||
asset.sidecarPath ? this.metadataRepository.readTags(asset.sidecarPath) : null,
|
||||
sidecarFile ? this.metadataRepository.readTags(sidecarFile.path) : null,
|
||||
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
|
||||
]);
|
||||
|
||||
|
||||

@@ -247,9 +247,9 @@ export class PluginService extends BaseService {

private async executeFilters(workflowFilters: WorkflowFilter[], context: WorkflowContext): Promise<boolean> {
for (const workflowFilter of workflowFilters) {
const filter = await this.pluginRepository.getFilter(workflowFilter.filterId);
const filter = await this.pluginRepository.getFilter(workflowFilter.pluginFilterId);
if (!filter) {
this.logger.error(`Filter ${workflowFilter.filterId} not found`);
this.logger.error(`Filter ${workflowFilter.pluginFilterId} not found`);
return false;
}

@@ -291,9 +291,9 @@ export class PluginService extends BaseService {

private async executeActions(workflowActions: WorkflowAction[], context: WorkflowContext): Promise<void> {
for (const workflowAction of workflowActions) {
const action = await this.pluginRepository.getAction(workflowAction.actionId);
const action = await this.pluginRepository.getAction(workflowAction.pluginActionId);
if (!action) {
throw new Error(`Action ${workflowAction.actionId} not found`);
throw new Error(`Action ${workflowAction.pluginActionId} not found`);
}

const pluginInstance = this.loadedPlugins.get(action.pluginId);

@@ -6,10 +6,20 @@ import sanitize from 'sanitize-filename';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { SystemConfigTemplateStorageOptionDto } from 'src/dtos/system-config.dto';
import { AssetPathType, AssetType, DatabaseLock, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import {
AssetFileType,
AssetPathType,
AssetType,
DatabaseLock,
JobName,
JobStatus,
QueueName,
StorageFolder,
} from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobOf, StorageAsset } from 'src/types';
import { getAssetFile } from 'src/utils/asset.util';
import { getLivePhotoMotionFilename } from 'src/utils/file';

const storageTokens = {

@@ -196,7 +206,7 @@ export class StorageTemplateService extends BaseService {
}

return this.databaseRepository.withLock(DatabaseLock.StorageTemplateMigration, async () => {
const { id, sidecarPath, originalPath, checksum, fileSizeInByte } = asset;
const { id, originalPath, checksum, fileSizeInByte } = asset;
const oldPath = originalPath;
const newPath = await this.getTemplatePath(asset, metadata);

@@ -213,6 +223,8 @@ export class StorageTemplateService extends BaseService {
newPath,
assetInfo: { sizeInBytes: fileSizeInByte, checksum },
});

const sidecarPath = getAssetFile(asset.files, AssetFileType.Sidecar)?.path;
if (sidecarPath) {
await this.storageCore.moveFile({
entityId: id,

@@ -78,13 +78,13 @@ export class WorkflowService extends BaseService {
}

private async validateAndMapFilters(
filters: Array<{ filterId: string; filterConfig?: any }>,
filters: Array<{ pluginFilterId: string; filterConfig?: any }>,
requiredContext: PluginContext,
) {
for (const dto of filters) {
const filter = await this.pluginRepository.getFilter(dto.filterId);
const filter = await this.pluginRepository.getFilter(dto.pluginFilterId);
if (!filter) {
throw new BadRequestException(`Invalid filter ID: ${dto.filterId}`);
throw new BadRequestException(`Invalid filter ID: ${dto.pluginFilterId}`);
}

if (!filter.supportedContexts.includes(requiredContext)) {

@@ -95,20 +95,20 @@ export class WorkflowService extends BaseService {
}

return filters.map((dto, index) => ({
filterId: dto.filterId,
pluginFilterId: dto.pluginFilterId,
filterConfig: dto.filterConfig || null,
order: index,
}));
}

private async validateAndMapActions(
actions: Array<{ actionId: string; actionConfig?: any }>,
actions: Array<{ pluginActionId: string; actionConfig?: any }>,
requiredContext: PluginContext,
) {
for (const dto of actions) {
const action = await this.pluginRepository.getAction(dto.actionId);
const action = await this.pluginRepository.getAction(dto.pluginActionId);
if (!action) {
throw new BadRequestException(`Invalid action ID: ${dto.actionId}`);
throw new BadRequestException(`Invalid action ID: ${dto.pluginActionId}`);
}
if (!action.supportedContexts.includes(requiredContext)) {
throw new BadRequestException(

@@ -118,7 +118,7 @@ export class WorkflowService extends BaseService {
}

return actions.map((dto, index) => ({
actionId: dto.actionId,
pluginActionId: dto.pluginActionId,
actionConfig: dto.actionConfig || null,
order: index,
}));

@@ -1,8 +1,9 @@
import { SystemConfig } from 'src/config';
import { VECTOR_EXTENSIONS } from 'src/constants';
import { Asset } from 'src/database';
import { Asset, AssetFile } from 'src/database';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
import {
AssetOrder,
AssetType,

@@ -475,8 +476,8 @@ export type StorageAsset = {
fileCreatedAt: Date;
originalPath: string;
originalFileName: string;
sidecarPath: string | null;
fileSizeInByte: number | null;
files: AssetFile[];
};

export type OnThisDayData = { year: number };

@@ -487,7 +488,9 @@ export interface MemoryData {

export type VersionCheckMetadata = { checkedAt: string; releaseVersion: string };
export type SystemFlags = { mountChecks: Record<StorageFolder, boolean> };
export type MaintenanceModeState = { isMaintenanceMode: true; secret: string } | { isMaintenanceMode: false };
export type MaintenanceModeState =
| { isMaintenanceMode: true; secret: string; action: SetMaintenanceModeDto }
| { isMaintenanceMode: false };
export type MemoriesState = {
/** memories have already been created through this date */
lastOnThisDayDate: string;
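The widened MaintenanceModeState union above still narrows on the isMaintenanceMode discriminant; a two-line sketch (the declared state value is illustrative):

declare const state: MaintenanceModeState;
if (state.isMaintenanceMode) {
  // Narrowed branch: secret and action exist only when maintenance mode is on.
  console.log(state.secret, state.action.action);
}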

@@ -21,6 +21,7 @@ export const getAssetFiles = (files: AssetFile[]) => ({
fullsizeFile: getAssetFile(files, AssetFileType.FullSize),
previewFile: getAssetFile(files, AssetFileType.Preview),
thumbnailFile: getAssetFile(files, AssetFileType.Thumbnail),
sidecarFile: getAssetFile(files, AssetFileType.Sidecar),
});

export const addAssets = async (

server/src/utils/database-backups.ts (new file, 385 lines)
@@ -0,0 +1,385 @@
import { BadRequestException } from '@nestjs/common';
import { debounce } from 'lodash';
import { DateTime } from 'luxon';
import path, { basename, join } from 'node:path';
import { PassThrough, Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { CacheControl, StorageFolder } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';

export function isValidDatabaseBackupName(filename: string) {
return filename.match(/^[\d\w-.]+\.sql(?:\.gz)?$/);
}

export function isValidDatabaseRoutineBackupName(filename: string) {
const oldBackupStyle = filename.match(/^immich-db-backup-\d+\.sql\.gz$/);
//immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
const newBackupStyle = filename.match(/^immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
return oldBackupStyle || newBackupStyle;
}

export function isFailedDatabaseBackupName(filename: string) {
return filename.match(/^immich-db-backup-.*\.sql\.gz\.tmp$/);
}

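Concretely, the three predicates above partition backup filenames as follows (the sample names are taken from the tests elsewhere in this diff):

isValidDatabaseBackupName('hello.sql.gz'); // match: any user-supplied .sql/.sql.gz name
isValidDatabaseBackupName('invalid backup'); // null: whitespace is rejected
isValidDatabaseRoutineBackupName('immich-db-backup-1753789649000.sql.gz'); // match: old epoch-style name
isValidDatabaseRoutineBackupName('immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz'); // match: new style
isValidDatabaseRoutineBackupName('hello.sql.gz'); // null: not a server-written routine backup
isFailedDatabaseBackupName('immich-db-backup-20250725T110216-v1.234.5-pg14.5.sql.gz.tmp'); // match: interrupted dump, eligible for cleanup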
type BackupRepos = {
|
||||
logger: LoggingRepository;
|
||||
storage: StorageRepository;
|
||||
config: ConfigRepository;
|
||||
process: ProcessRepository;
|
||||
database: DatabaseRepository;
|
||||
};
|
||||
|
||||
export class UnsupportedPostgresError extends Error {
|
||||
constructor(databaseVersion: string) {
|
||||
super(`Unsupported PostgreSQL version: ${databaseVersion}`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function buildPostgresLaunchArguments(
  { logger, config, database }: Pick<BackupRepos, 'logger' | 'config' | 'database'>,
  bin: 'pg_dump' | 'pg_dumpall' | 'psql',
): Promise<{
  bin: string;
  args: string[];
  databasePassword: string;
  databaseVersion: string;
  databaseMajorVersion?: number;
}> {
  const {
    database: { config: databaseConfig },
  } = config.getEnv();
  const isUrlConnection = databaseConfig.connectionType === 'url';

  const databaseVersion = await database.getPostgresVersion();
  const databaseSemver = semver.coerce(databaseVersion);
  const databaseMajorVersion = databaseSemver?.major;

  const args: string[] = [];

  if (isUrlConnection) {
    if (bin !== 'pg_dump') {
      args.push('--dbname');
    }

    let url = databaseConfig.url;
    if (URL.canParse(databaseConfig.url)) {
      const parsedUrl = new URL(databaseConfig.url);
      // remove known bad parameters
      parsedUrl.searchParams.delete('uselibpqcompat');
      url = parsedUrl.toString();
    }

    args.push(url);
  } else {
    args.push(
      '--username',
      databaseConfig.username,
      '--host',
      databaseConfig.host,
      '--port',
      databaseConfig.port.toString(),
    );

    switch (bin) {
      case 'pg_dumpall': {
        args.push('--database');
        break;
      }
      case 'psql': {
        args.push('--dbname');
        break;
      }
    }

    args.push(databaseConfig.database);
  }

  switch (bin) {
    case 'pg_dump':
    case 'pg_dumpall': {
      args.push('--clean', '--if-exists');
      break;
    }
    case 'psql': {
      args.push(
        // don't commit any transaction on failure
        '--single-transaction',
        // exit with non-zero code on error
        '--set',
        'ON_ERROR_STOP=on',
        // used for progress monitoring
        '--echo-all',
        '--output=/dev/null',
      );
      break;
    }
  }

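  // Only PostgreSQL 14 through 18 is accepted; the bin path returned below
  // assumes the matching client binaries live under /usr/lib/postgresql/<major>/bin.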
  if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
    logger.error(`Database Restore Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
    throw new UnsupportedPostgresError(databaseVersion);
  }

  return {
    bin: `/usr/lib/postgresql/${databaseMajorVersion}/bin/${bin}`,
    args,
    databasePassword: isUrlConnection ? new URL(databaseConfig.url).password : databaseConfig.password,
    databaseVersion,
    databaseMajorVersion,
  };
}

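// Illustrative only: one way a caller might wire these helpers together (the
// repository wiring below is an assumption, not part of this file):
//
//   const repos: BackupRepos = { logger, storage, config, process, database };
//   await createDatabaseBackup(repos);
//   const names = await listDatabaseBackups(repos);
//   await restoreDatabaseBackup(repos, names[0], (step, p) => logger.log(`${step}: ${p}`));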
export async function createDatabaseBackup(
  { logger, storage, process: processRepository, ...pgRepos }: BackupRepos,
  filenamePrefix: string = '',
): Promise<void> {
  logger.debug(`Database Backup Started`);

  const { bin, args, databasePassword, databaseVersion, databaseMajorVersion } = await buildPostgresLaunchArguments(
    { logger, ...pgRepos },
    'pg_dump',
  );

  logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);

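  // The dump is written under a .tmp suffix and renamed only after the pipeline
  // succeeds, so an interrupted run never leaves behind what looks like a valid backup.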
  const backupFilePath = join(
    StorageCore.getBaseFolder(StorageFolder.Backups),
    `${filenamePrefix}immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
  );

  try {
    const pgdump = processRepository.createSpawnDuplexStream(bin, args, {
      env: {
        PATH: process.env.PATH,
        PGPASSWORD: databasePassword,
      },
    });

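    // --rsyncable makes gzip periodically reset its compression state, so
    // consecutive backups stay block-aligned and cheap to sync with rsync-style tools.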
    const gzip = processRepository.createSpawnDuplexStream('gzip', ['--rsyncable']);
    const fileStream = storage.createWriteStream(backupFilePath);

    await pipeline(pgdump, gzip, fileStream);
    await storage.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
  } catch (error) {
    logger.error(`Database Backup Failure: ${error}`);
    await storage
      .unlink(backupFilePath)
      .catch((error) => logger.error(`Failed to delete failed backup file: ${error}`));
    throw error;
  }

  logger.log(`Database Backup Success`);
}

export async function restoreDatabaseBackup(
  { logger, storage, process: processRepository, ...pgRepos }: BackupRepos,
  filename: string,
  progressCb?: (action: 'backup' | 'restore', progress: number) => void,
): Promise<void> {
  logger.debug(`Database Restore Started`);

  let complete = false;
  try {
    if (!isValidDatabaseBackupName(filename)) {
      throw new Error('Invalid backup file format!');
    }

    const backupFilePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
    await storage.stat(backupFilePath); // => check file exists

    const { bin, args, databasePassword, databaseMajorVersion } = await buildPostgresLaunchArguments(
      { logger, ...pgRepos },
      'psql',
    );

    progressCb?.('backup', 0.05);

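    // Snapshot the current database first: if the restore fails midway, the
    // 'restore-point-' backup provides a rollback path.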
    await createDatabaseBackup({ logger, storage, process: processRepository, ...pgRepos }, 'restore-point-');

    logger.log(`Database Restore Starting. Database Version: ${databaseMajorVersion}`);

    let inputStream: Readable;
    if (backupFilePath.endsWith('.gz')) {
      const fileStream = storage.createPlainReadStream(backupFilePath);
      const gunzip = storage.createGunzip();
      fileStream.pipe(gunzip);
      inputStream = gunzip;
    } else {
      inputStream = storage.createPlainReadStream(backupFilePath);
    }

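    // Prepend a preamble that terminates other connections and resets the public
    // schema, then stream the (possibly gunzipped) dump into psql.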
    async function* sql() {
      yield `
        -- drop all other database connections
        SELECT pg_terminate_backend(pid)
        FROM pg_stat_activity
        WHERE datname = current_database()
        AND pid <> pg_backend_pid();

        -- re-create the default schema
        DROP SCHEMA public CASCADE;
        CREATE SCHEMA public;

        -- restore access to schema
        GRANT ALL ON SCHEMA public TO postgres;
        GRANT ALL ON SCHEMA public TO public;
      `;

      for await (const chunk of inputStream) {
        yield chunk;
      }
    }

    const sqlStream = Readable.from(sql());
    const psql = processRepository.createSpawnDuplexStream(bin, args, {
      env: {
        PATH: process.env.PATH,
        PGPASSWORD: databasePassword,
      },
    });

    const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
      if (complete) {
        return;
      }

      logger.log(`Restore progress ~ ${(progress * 100).toFixed(2)}%`);
      progressCb?.('restore', progress);
    });

    await pipeline(sqlStream, progressSource, psql, progressSink);
  } catch (error) {
    logger.error(`Database Restore Failure: ${error}`);
    throw error;
  } finally {
    complete = true;
  }

  logger.log(`Database Restore Success`);
}

export async function deleteDatabaseBackup({ storage }: Pick<BackupRepos, 'storage'>, files: string[]): Promise<void> {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);

  if (files.some((filename) => !isValidDatabaseBackupName(filename))) {
    throw new BadRequestException('Invalid backup name!');
  }

  await Promise.all(files.map((filename) => storage.unlink(path.join(backupsFolder, filename))));
}

export async function listDatabaseBackups({ storage }: Pick<BackupRepos, 'storage'>): Promise<string[]> {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
  const files = await storage.readdir(backupsFolder);
  return files
    .filter((fn) => isValidDatabaseBackupName(fn))
    // sort uploaded and routine backups into separate groups, names ascending,
    // with a comparator that is consistent for every pair
    .toSorted((a, b) =>
      a.startsWith('uploaded-') === b.startsWith('uploaded-') ? a.localeCompare(b) : a.startsWith('uploaded-') ? 1 : -1,
    )
    .toReversed();
}

export async function uploadDatabaseBackup(
  { storage }: Pick<BackupRepos, 'storage'>,
  file: Express.Multer.File,
): Promise<void> {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
  const fn = basename(file.originalname);
  if (!isValidDatabaseBackupName(fn)) {
    throw new BadRequestException('Invalid backup name!');
  }

  const path = join(backupsFolder, `uploaded-${fn}`);
  await storage.createOrOverwriteFile(path, file.buffer);
}

export function downloadDatabaseBackup(fileName: string) {
  if (!isValidDatabaseBackupName(fileName)) {
    throw new BadRequestException('Invalid backup name!');
  }

  const path = join(StorageCore.getBaseFolder(StorageFolder.Backups), fileName);

  return {
    path,
    fileName,
    cacheControl: CacheControl.PrivateWithoutCache,
    contentType: fileName.endsWith('.gz') ? 'application/gzip' : 'application/sql',
  };
}

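// Estimates restore progress from line counts on both sides of psql: `source`
// counts statements fed to stdin (skipping bulk COPY rows between a 'FROM stdin'
// header and its terminating \.), while `sink` counts the lines psql echoes back
// via --echo-all; linesProcessed / linesSent then approximates completion.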
function createSqlProgressStreams(cb: (progress: number) => void) {
  const STDIN_START_MARKER = new TextEncoder().encode('FROM stdin');
  const STDIN_END_MARKER = new TextEncoder().encode(String.raw`\.`);

  let readingStdin = false;
  let sequenceIdx = 0;

  let linesSent = 0;
  let linesProcessed = 0;

  const startedAt = Date.now();
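  // Until the writer side has finished, the total line count is unknown, so
  // progress is simulated: ramp from 10% to a 30% cap over the first two seconds,
  // then switch to the real processed/sent ratio once linesSent is final.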
  const cbDebounced = debounce(
    () => {
      const progress = source.writableEnded
        ? Math.min(1, linesProcessed / linesSent)
        : // progress simulation while we're in an indeterminate state
          Math.min(0.3, 0.1 + (Date.now() - startedAt) / 1e4);
      cb(progress);
    },
    100,
    {
      maxWait: 100,
    },
  );

  let lastByte = -1;
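  // Byte-level scan of everything flowing into psql; 10 is '\n'. The sequence
  // matcher toggles readingStdin whenever a 'FROM stdin' or '\.' marker streams past.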
  const source = new PassThrough({
    transform(chunk, _encoding, callback) {
      for (const byte of chunk) {
        if (!readingStdin && byte === 10 && lastByte !== 10) {
          linesSent += 1;
        }

        lastByte = byte;

        const sequence = readingStdin ? STDIN_END_MARKER : STDIN_START_MARKER;
        if (sequence[sequenceIdx] === byte) {
          sequenceIdx += 1;

          if (sequence.length === sequenceIdx) {
            sequenceIdx = 0;
            readingStdin = !readingStdin;
          }
        } else {
          sequenceIdx = 0;
        }
      }

      cbDebounced();
      this.push(chunk);
      callback();
    },
  });

  const sink = new Writable({
    write(chunk, _encoding, callback) {
      for (const byte of chunk) {
        if (byte === 10) {
          linesProcessed++;
        }
      }

      cbDebounced();
      callback();
    },
  });

  return [source, sink];
}

@@ -42,7 +42,7 @@ const cacheControlHeaders: Record<CacheControl, string | null> = {
 export const sendFile = async (
   res: Response,
   next: NextFunction,
-  handler: () => Promise<ImmichFileResponse>,
+  handler: () => Promise<ImmichFileResponse> | ImmichFileResponse,
   logger: LoggingRepository,
 ): Promise<void> => {
   // promisified version of 'res.sendFile' for cleaner async handling

@@ -2,10 +2,14 @@ import { createAdapter } from '@socket.io/redis-adapter';
 import Redis from 'ioredis';
 import { SignJWT } from 'jose';
 import { randomBytes } from 'node:crypto';
+import { join } from 'node:path';
 import { Server as SocketIO } from 'socket.io';
-import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
+import { StorageCore } from 'src/cores/storage.core';
+import { MaintenanceAuthDto, MaintenanceDetectInstallResponseDto } from 'src/dtos/maintenance.dto';
+import { StorageFolder } from 'src/enum';
 import { ConfigRepository } from 'src/repositories/config.repository';
 import { AppRestartEvent } from 'src/repositories/event.repository';
+import { StorageRepository } from 'src/repositories/storage.repository';

 export function sendOneShotAppRestart(state: AppRestartEvent): void {
   const server = new SocketIO();

@@ -72,3 +76,41 @@ export async function signMaintenanceJwt(secret: string, data: MaintenanceAuthDt
export function generateMaintenanceSecret(): string {
  return randomBytes(64).toString('hex');
}

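// The new function below probes each storage folder by reading and then
// overwriting its .immich marker file, reporting readability, writability, and
// entry count, presumably so a prior install can be detected before restoring.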
export async function detectPriorInstall(
  storageRepository: StorageRepository,
): Promise<MaintenanceDetectInstallResponseDto> {
  return {
    storage: await Promise.all(
      Object.values(StorageFolder).map(async (folder) => {
        const path = StorageCore.getBaseFolder(folder);
        const files = await storageRepository.readdir(path);
        const fn = join(StorageCore.getBaseFolder(folder), '.immich');

        let readable = false,
          writable = false;

        try {
          await storageRepository.readFile(fn);
          readable = true;

          try {
            await storageRepository.overwriteFile(fn, Buffer.from(`${Date.now()}`));
            writable = true;
          } catch {
            // no-op
          }
        } catch {
          // no-op
        }

        return {
          folder,
          readable,
          writable,
          files: files.filter((fn) => fn !== '.immich').length,
        };
      }),
    ),
  };
}

@@ -96,6 +96,16 @@ export class UUIDAssetIDParamDto {
  assetId!: string;
}

export class FilenameParamDto {
  @IsNotEmpty()
  @IsString()
  @ApiProperty({ format: 'string' })
  @Matches(/^[a-zA-Z0-9_\-.]+$/, {
    message: 'Filename contains invalid characters',
  })
  filename!: string;
}

type PinCodeOptions = { optional?: boolean } & OptionalOptions;
export const PinCode = (options?: PinCodeOptions & ApiPropertyOptions) => {
  const { optional, nullable, emptyToNull, ...apiPropertyOptions } = {

@@ -12,12 +12,11 @@ async function bootstrap() {

  const app = await NestFactory.create<NestExpressApplication>(MaintenanceModule, { bufferLogs: true });
  app.get(AppRepository).setCloseFn(() => app.close());

  void configureExpress(app, {
    permitSwaggerWrite: false,
    ssr: MaintenanceWorkerService,
  });

  void app.get(MaintenanceWorkerService).logSecret();
}

bootstrap().catch((error) => {

43 server/test/fixtures/asset.stub.ts vendored
@@ -24,6 +24,18 @@ const fullsizeFile: AssetFile = {
  path: '/uploads/user-id/fullsize/path.webp',
};

const sidecarFileWithExt: AssetFile = {
  id: 'sidecar-with-ext',
  type: AssetFileType.Sidecar,
  path: '/original/path.ext.xmp',
};

const sidecarFileWithoutExt: AssetFile = {
  id: 'sidecar-without-ext',
  type: AssetFileType.Sidecar,
  path: '/original/path.xmp',
};

const files: AssetFile[] = [fullsizeFile, previewFile, thumbnailFile];

export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif })[]) => {
@@ -51,8 +63,8 @@ export const assetStub = {
     fileCreatedAt: new Date('2022-06-19T23:41:36.910Z'),
     originalPath: '/original/path.jpg',
     originalFileName: 'IMG_123.jpg',
-    sidecarPath: null,
+    fileSizeInByte: 12_345,
     files: [],
     ...asset,
   }),
   noResizePath: Object.freeze({
@@ -81,7 +93,6 @@
     sharedLinks: [],
     faces: [],
     exifInfo: {} as Exif,
-    sidecarPath: null,
     deletedAt: null,
     isExternal: false,
     duplicateId: null,
@@ -117,7 +128,6 @@
     sharedLinks: [],
     originalFileName: 'IMG_456.jpg',
     faces: [],
-    sidecarPath: null,
     isExternal: false,
     exifInfo: {
       fileSizeInByte: 123_000,
@@ -157,7 +167,6 @@
     sharedLinks: [],
     originalFileName: 'asset-id.ext',
     faces: [],
-    sidecarPath: null,
     deletedAt: null,
     duplicateId: null,
     isOffline: false,
@@ -194,7 +203,6 @@
     originalFileName: 'asset-id.jpg',
     faces: [],
     deletedAt: null,
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
       exifImageHeight: 1000,
@@ -243,7 +251,6 @@
     originalFileName: 'asset-id.jpg',
     faces: [],
     deletedAt: null,
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
       exifImageHeight: 3840,
@@ -285,7 +292,6 @@
     sharedLinks: [],
     originalFileName: 'asset-id.jpg',
     faces: [],
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
       exifImageHeight: 3840,
@@ -328,7 +334,6 @@
     sharedLinks: [],
     originalFileName: 'asset-id.jpg',
     faces: [],
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
       exifImageHeight: 3840,
@@ -367,7 +372,6 @@
     originalFileName: 'asset-id.jpg',
     faces: [],
     deletedAt: null,
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
       exifImageHeight: 3840,
@@ -409,7 +413,6 @@
     originalFileName: 'asset-id.jpg',
     faces: [],
     deletedAt: null,
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
     } as Exif,
@@ -448,7 +451,6 @@
     sharedLinks: [],
     originalFileName: 'asset-id.ext',
     faces: [],
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
     } as Exif,
@@ -490,7 +492,6 @@
     sharedLinks: [],
     originalFileName: 'asset-id.ext',
     faces: [],
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
     } as Exif,
@@ -526,7 +527,6 @@
     livePhotoVideoId: null,
     sharedLinks: [],
     faces: [],
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 100_000,
       exifImageHeight: 2160,
@@ -553,6 +553,7 @@
       fileSizeInByte: 100_000,
       timeZone: `America/New_York`,
     },
+    files: [] as AssetFile[],
     libraryId: null,
     visibility: AssetVisibility.Hidden,
   } as MapAsset & { faces: AssetFace[]; files: AssetFile[]; exifInfo: Exif }),
@@ -573,7 +574,7 @@
     files,
     faces: [] as AssetFace[],
     visibility: AssetVisibility.Timeline,
-  } as MapAsset & { faces: AssetFace[] }),
+  } as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),

   livePhotoWithOriginalFileName: Object.freeze({
     id: 'live-photo-still-asset',
@@ -589,10 +590,11 @@
       fileSizeInByte: 25_000,
       timeZone: `America/New_York`,
     },
+    files: [] as AssetFile[],
     libraryId: null,
     faces: [] as AssetFace[],
     visibility: AssetVisibility.Timeline,
-  } as MapAsset & { faces: AssetFace[] }),
+  } as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),

   withLocation: Object.freeze({
     id: 'asset-with-favorite-id',
@@ -605,7 +607,6 @@
     deviceId: 'device-id',
     checksum: Buffer.from('file hash', 'utf8'),
     originalPath: '/original/path.ext',
-    sidecarPath: null,
     type: AssetType.Image,
     files: [previewFile],
     thumbhash: null,
@@ -652,7 +653,7 @@
     thumbhash: null,
     checksum: Buffer.from('file hash', 'utf8'),
     type: AssetType.Image,
-    files: [previewFile],
+    files: [previewFile, sidecarFileWithExt],
     encodedVideoPath: null,
     createdAt: new Date('2023-02-23T05:06:29.716Z'),
     updatedAt: new Date('2023-02-23T05:06:29.716Z'),
@@ -665,7 +666,6 @@
     sharedLinks: [],
     originalFileName: 'asset-id.ext',
     faces: [],
-    sidecarPath: '/original/path.ext.xmp',
     deletedAt: null,
     duplicateId: null,
     isOffline: false,
@@ -688,7 +688,7 @@
     thumbhash: null,
     checksum: Buffer.from('file hash', 'utf8'),
     type: AssetType.Image,
-    files: [previewFile],
+    files: [previewFile, sidecarFileWithoutExt],
     encodedVideoPath: null,
     createdAt: new Date('2023-02-23T05:06:29.716Z'),
     updatedAt: new Date('2023-02-23T05:06:29.716Z'),
@@ -701,7 +701,6 @@
     sharedLinks: [],
     originalFileName: 'asset-id.ext',
     faces: [],
-    sidecarPath: '/original/path.xmp',
     deletedAt: null,
     duplicateId: null,
     isOffline: false,
@@ -734,7 +733,6 @@
     livePhotoVideoId: null,
     sharedLinks: [],
     faces: [],
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 100_000,
     } as Exif,
@@ -776,7 +774,6 @@
     originalFileName: 'photo.jpg',
     faces: [],
     deletedAt: null,
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
     } as Exif,
@@ -812,7 +809,6 @@
     originalFileName: 'asset-id.dng',
     faces: [],
     deletedAt: null,
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
       profileDescription: 'Adobe RGB',
@@ -853,7 +849,6 @@
     originalFileName: 'asset-id.hif',
     faces: [],
     deletedAt: null,
-    sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 5000,
       profileDescription: 'Adobe RGB',

@@ -1,5 +1,5 @@
 import { Kysely } from 'kysely';
-import { JobName, SharedLinkType } from 'src/enum';
+import { AssetFileType, JobName, SharedLinkType } from 'src/enum';
 import { AccessRepository } from 'src/repositories/access.repository';
 import { AlbumRepository } from 'src/repositories/album.repository';
 import { AssetRepository } from 'src/repositories/asset.repository';
@@ -184,7 +184,15 @@ describe(AssetService.name, () => {
     jobRepo.queue.mockResolvedValue();

     const { user } = await ctx.newUser();
-    const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id, sidecarPath: '/path/to/my/sidecar.xmp' });
+    const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
+
+    await ctx.newAssetFile({
+      assetId: oldAsset.id,
+      path: '/path/to/my/sidecar.xmp',
+      type: AssetFileType.Sidecar,
+    });

     const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });

     await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });

Some files were not shown because too many files have changed in this diff.