Mirror of https://github.com/immich-app/immich.git (synced 2025-12-11 07:11:05 -08:00)

Compare commits: feat/datab...workflow-u (39 commits)
| SHA1 |
|---|
| 7ed646178e |
| 3d771127d2 |
| 5156438336 |
| 76ec9e3ebf |
| 1e238e7a48 |
| 63e38f347e |
| 6222c4e97f |
| 1c64d21148 |
| fe931052e5 |
| 4493f30b78 |
| 75a7c9c06c |
| ae8f5a6673 |
| 31f2c7b505 |
| 5537a869ea |
| 25ac9975e6 |
| 5eccffc084 |
| ba6687dde9 |
| 288ba44825 |
| bbba1bfe8c |
| 4be9a5ebf8 |
| bd4355a75f |
| d41921247b |
| 853a024f0f |
| 4fe494776e |
| 76b4adf276 |
| 75dde0d076 |
| cffb68d1c4 |
| 45f68f73a9 |
| 290de9d27c |
| 84b031bbe4 |
| 7cce100e96 |
| 380d03476e |
| 1f25422958 |
| 2fe36e77d9 |
| 89360e7d8d |
| 8e5d21a2c0 |
| 69779f22f3 |
| 7eecfc43df |
| 272ad7c773 |
.github/package.json
@@ -4,6 +4,6 @@
    "format:fix": "prettier --write ."
  },
  "devDependencies": {
    "prettier": "^3.5.3"
    "prettier": "^3.7.4"
  }
}
.gitignore
@@ -7,7 +7,7 @@
.idea

docker/upload
docker/library*
docker/library
uploads
coverage
@@ -31,7 +31,7 @@
    "eslint-plugin-unicorn": "^62.0.0",
    "globals": "^16.0.0",
    "mock-fs": "^5.2.0",
    "prettier": "^3.2.5",
    "prettier": "^3.7.4",
    "prettier-plugin-organize-imports": "^4.0.0",
    "typescript": "^5.3.3",
    "typescript-eslint": "^8.28.0",
@@ -41,7 +41,7 @@ By default, Immich will keep the last 14 database dumps and create a new dump ev

#### Trigger Dump

You are able to trigger a database dump in the [admin job status page](http://my.immich.app/admin/jobs-status).
You are able to trigger a database dump in the [admin job status page](http://my.immich.app/admin/queues).
Visit the page, open the "Create job" modal from the top right, select "Create Database Dump" and click "Confirm".
A job will run and trigger a dump; you can verify this worked correctly by checking the logs or the `backups/` folder.
These dumps will count towards the last `X` dumps that will be kept based on your settings.
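For reference, the same dump can be queued without the UI. The sketch below leans on the e2e helpers that appear later in this compare (createJob, listDatabaseBackups and ManualJobName, here assumed to come from @immich/sdk as in those utilities, plus a Bearer-auth header); how the SDK is pointed at your server and how you obtain the access token are assumptions, so treat it as an illustration rather than an official recipe.

```ts
// Illustrative only: queue the "Create Database Dump" job via the SDK, mirroring the
// e2e utilities in this diff. Assumes the SDK has already been initialized against your
// server (the tests do this through a utils.initSdk() helper) and that accessToken
// belongs to an admin.
import { createJob, listDatabaseBackups, ManualJobName } from '@immich/sdk';

const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer ${accessToken}` });

export const triggerDatabaseDump = async (accessToken: string) => {
  // Equivalent of selecting "Create Database Dump" in the "Create job" modal.
  await createJob({ jobCreateDto: { name: ManualJobName.BackupDatabase } }, { headers: asBearerAuth(accessToken) });

  // The job runs asynchronously; in practice you would poll this listing until the new
  // dump shows up (the e2e createBackup helper does exactly that).
  const { backups } = await listDatabaseBackups({ headers: asBearerAuth(accessToken) });
  return backups;
};
```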
@@ -21,6 +21,9 @@ server {
    # allow large file uploads
    client_max_body_size 50000M;

    # disable buffering uploads to prevent OOM on reverse proxy server and make uploads twice as fast (no pause)
    proxy_request_buffering off;

    # Set headers
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
@@ -48,7 +48,6 @@ You can access the web from `http://your-machine-ip:3000` or `http://localhost:3
**Notes:**

- The "web" development container runs with uid 1000. If that uid does not have read/write permissions on the mounted volumes, you may encounter errors
- In case of rootless docker setup, you need to use root within the container, otherwise you will encounter read/write permission related errors, see comments in `docker/docker-compose.dev.yml`.

#### Connect web to a remote backend
@@ -1222,4 +1222,4 @@ Feel free to make a feature request if there's a model you want to use that we d
[huggingface-clip]: https://huggingface.co/collections/immich-app/clip-654eaefb077425890874cd07
[huggingface-multilingual-clip]: https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7
[smart-search-settings]: https://my.immich.app/admin/system-settings?isOpen=machine-learning+smart-search
[job-status-page]: https://my.immich.app/admin/jobs-status
[job-status-page]: https://my.immich.app/admin/queues
@@ -53,7 +53,7 @@ Version mismatches between both hosts may cause bugs and instability, so remembe

Adding a new URL to the settings is recommended over replacing the existing URL. This is because it will allow machine learning tasks to be processed successfully when the remote server is down by falling back to the local machine learning container. If you do not want machine learning tasks to be processed locally when the remote server is not available, you can instead replace the existing URL and only provide the remote container's URL. If doing this, you can remove the `immich-machine-learning` section of the local `docker-compose.yml` file to save resources, as this service will never be used.

Do note that this will mean that Smart Search and Face Detection jobs will fail to be processed when the remote instance is not available. This in turn means that tasks dependent on these features—Duplicate Detection and Facial Recognition—will not run for affected assets. If this occurs, you must manually click the _Missing_ button next to Smart Search and Face Detection in the [Job Status](http://my.immich.app/admin/jobs-status) page for the jobs to be retried.
Do note that this will mean that Smart Search and Face Detection jobs will fail to be processed when the remote instance is not available. This in turn means that tasks dependent on these features—Duplicate Detection and Facial Recognition—will not run for affected assets. If this occurs, you must manually click the _Missing_ button next to Smart Search and Face Detection in the [Job Status](http://my.immich.app/admin/queues) page for the jobs to be retried.
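The fallback behaviour described above (try each machine learning URL in order, temporarily skipping servers that recently failed) can be pictured with a short sketch. This is illustrative TypeScript only, not Immich's actual implementation; the cooldown length and every name below are invented for the example.

```ts
// Conceptual sketch of "attempt each URL in order, ignore unreachable servers for a while".
// Purely illustrative; not taken from the Immich code base.
const COOLDOWN_MS = 30_000;
const lastFailure = new Map<string, number>();

export async function requestFromFirstHealthyServer(urls: string[], path: string, init?: RequestInit): Promise<Response> {
  for (const url of urls) {
    const failedAt = lastFailure.get(url);
    if (failedAt !== undefined && Date.now() - failedAt < COOLDOWN_MS) {
      continue; // this server failed recently, skip it until the cooldown expires
    }
    try {
      const response = await fetch(new URL(path, url), init);
      lastFailure.delete(url); // the server answered, clear any recorded failure
      return response;
    } catch {
      lastFailure.set(url, Date.now()); // unreachable, fall through to the next URL
    }
  }
  throw new Error('No machine learning server is currently reachable');
}
```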
## Load balancing
@@ -38,7 +38,7 @@
    "@docusaurus/module-type-aliases": "~3.9.0",
    "@docusaurus/tsconfig": "^3.7.0",
    "@docusaurus/types": "^3.7.0",
    "prettier": "^3.2.4",
    "prettier": "^3.7.4",
    "typescript": "^5.1.6"
  },
  "browserslist": {
@@ -43,7 +43,7 @@
    "oidc-provider": "^9.0.0",
    "pg": "^8.11.3",
    "pngjs": "^7.0.0",
    "prettier": "^3.2.5",
    "prettier": "^3.7.4",
    "prettier-plugin-organize-imports": "^4.0.0",
    "sharp": "^0.34.5",
    "socket.io-client": "^4.7.4",
@@ -1,267 +0,0 @@
import { LoginResponseDto, ManualJobName } from '@immich/sdk';
import { errorDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it } from 'vitest';

describe('/admin/database-backups', () => {
  let cookie: string | undefined;
  let admin: LoginResponseDto;

  beforeAll(async () => {
    await utils.resetDatabase();
    admin = await utils.adminSetup();
    await utils.resetBackups(admin.accessToken);
  });

  describe('GET /', async () => {
    it('should succeed and be empty', async () => {
      const { status, body } = await request(app)
        .get('/admin/database-backups')
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual({
        backups: [],
      });
    });

    it('should contain a created backup', async () => {
      await utils.createJob(admin.accessToken, {
        name: ManualJobName.BackupDatabase,
      });

      await utils.waitForQueueFinish(admin.accessToken, 'backupDatabase');

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app)
              .get('/admin/database-backups')
              .set('Authorization', `Bearer ${admin.accessToken}`);

            expect(status).toBe(200);
            return body;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toEqual(
          expect.objectContaining({
            backups: [expect.stringMatching(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/)],
          }),
        );
    });
  });

  describe('DELETE /', async () => {
    it('should delete backup', async () => {
      const filename = await utils.createBackup(admin.accessToken);

      const { status } = await request(app)
        .delete(`/admin/database-backups`)
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({ backups: [filename] });

      expect(status).toBe(200);

      const { status: listStatus, body } = await request(app)
        .get('/admin/database-backups')
        .set('Authorization', `Bearer ${admin.accessToken}`);

      expect(listStatus).toBe(200);
      expect(body).toEqual(
        expect.objectContaining({
          backups: [],
        }),
      );
    });
  });

  // => action: restore database flow

  describe.sequential('POST /start-restore', () => {
    afterAll(async () => {
      await request(app).post('/admin/maintenance').set('cookie', cookie!).send({ action: 'end' });
      await utils.poll(
        () => request(app).get('/server/config'),
        ({ status, body }) => status === 200 && !body.maintenanceMode,
      );

      admin = await utils.adminSetup();
    });

    it.sequential('should not work when the server is configured', async () => {
      const { status, body } = await request(app).post('/admin/database-backups/start-restore').send();

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest('The server already has an admin'));
    });

    it.sequential('should enter maintenance mode in "database restore mode"', async () => {
      await utils.resetDatabase(); // reset database before running this test

      const { status, headers } = await request(app).post('/admin/database-backups/start-restore').send();

      expect(status).toBe(201);

      cookie = headers['set-cookie'][0].split(';')[0];

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toBeTruthy();

      const { status: status2, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
      expect(status2).toBe(200);
      expect(body).toEqual({
        active: true,
        action: 'restore_database',
      });
    });
  });

  // => action: restore database

  describe.sequential('POST /backups/restore', () => {
    beforeAll(async () => {
      await utils.disconnectDatabase();
    });

    afterAll(async () => {
      await utils.connectDatabase();
    });

    it.sequential('should restore a backup', { timeout: 60_000 }, async () => {
      const filename = await utils.createBackup(admin.accessToken);

      const { status } = await request(app)
        .post('/admin/maintenance')
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({
          action: 'restore_database',
          restoreBackupFilename: filename,
        });

      expect(status).toBe(201);

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toBeTruthy();

      const { status: status2, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
      expect(status2).toBe(200);
      expect(body).toEqual(
        expect.objectContaining({
          active: true,
          action: 'restore_database',
        }),
      );

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 60_000,
          },
        )
        .toBeFalsy();
    });

    it.sequential('fail to restore a corrupted backup', { timeout: 60_000 }, async () => {
      await utils.prepareTestBackup('corrupted');

      const { status, headers } = await request(app)
        .post('/admin/maintenance')
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({
          action: 'restore_database',
          restoreBackupFilename: 'development-corrupted.sql.gz',
        });

      expect(status).toBe(201);
      cookie = headers['set-cookie'][0].split(';')[0];

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toBeTruthy();

      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
            expect(status).toBe(200);
            return body;
          },
          {
            interval: 500,
            timeout: 10_000,
          },
        )
        .toEqual(
          expect.objectContaining({
            active: true,
            action: 'restore_database',
            error: 'Something went wrong, see logs!',
          }),
        );

      const { status: status2, body: body2 } = await request(app)
        .get('/admin/maintenance/status')
        .set('cookie', cookie!)
        .send({ token: 'token' });
      expect(status2).toBe(200);
      expect(body2).toEqual(
        expect.objectContaining({
          active: true,
          action: 'restore_database',
          error: expect.stringContaining('IM CORRUPTED'),
        }),
      );

      await request(app).post('/admin/maintenance').set('cookie', cookie!).send({
        action: 'end',
      });

      await utils.poll(
        () => request(app).get('/server/config'),
        ({ status, body }) => status === 200 && !body.maintenanceMode,
      );
    });
  });
});
@@ -14,7 +14,6 @@ describe('/admin/maintenance', () => {
    await utils.resetDatabase();
    admin = await utils.adminSetup();
    nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);
    await utils.resetBackups(admin.accessToken);
  });

  // => outside of maintenance mode
@@ -27,17 +26,6 @@ describe('/admin/maintenance', () => {
    });
  });

  describe('GET /status', async () => {
    it('to always indicate we are not in maintenance mode', async () => {
      const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
      expect(status).toBe(200);
      expect(body).toEqual({
        active: false,
        action: 'end',
      });
    });
  });

  describe('POST /login', async () => {
    it('should not work out of maintenance mode', async () => {
      const { status, body } = await request(app).post('/admin/maintenance/login').send({ token: 'token' });
@@ -51,7 +39,6 @@ describe('/admin/maintenance', () => {
  describe.sequential('POST /', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).post('/admin/maintenance').send({
        active: false,
        action: 'end',
      });
      expect(status).toBe(401);
@@ -82,7 +69,6 @@ describe('/admin/maintenance', () => {
        .send({
          action: 'start',
        });

      expect(status).toBe(201);

      cookie = headers['set-cookie'][0].split(';')[0];
@@ -93,13 +79,12 @@ describe('/admin/maintenance', () => {
      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            const { body } = await request(app).get('/server/config');
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 10_000,
            interval: 5e2,
            timeout: 1e4,
          },
        )
        .toBeTruthy();
@@ -117,17 +102,6 @@ describe('/admin/maintenance', () => {
    });
  });

  describe('GET /status', async () => {
    it('to indicate we are in maintenance mode', async () => {
      const { status, body } = await request(app).get('/admin/maintenance/status').send({ token: 'token' });
      expect(status).toBe(200);
      expect(body).toEqual({
        active: true,
        action: 'start',
      });
    });
  });

  describe('POST /login', async () => {
    it('should fail without cookie or token in body', async () => {
      const { status, body } = await request(app).post('/admin/maintenance/login').send({});
@@ -184,13 +158,12 @@ describe('/admin/maintenance', () => {
      await expect
        .poll(
          async () => {
            const { status, body } = await request(app).get('/server/config');
            expect(status).toBe(200);
            const { body } = await request(app).get('/server/config');
            return body.maintenanceMode;
          },
          {
            interval: 500,
            timeout: 10_000,
            interval: 5e2,
            timeout: 1e4,
          },
        )
        .toBeFalsy();
@@ -6,9 +6,7 @@ import {
  CheckExistingAssetsDto,
  CreateAlbumDto,
  CreateLibraryDto,
  JobCreateDto,
  MaintenanceAction,
  ManualJobName,
  MetadataSearchDto,
  Permission,
  PersonCreateDto,
@@ -23,7 +21,6 @@ import {
  checkExistingAssets,
  createAlbum,
  createApiKey,
  createJob,
  createLibrary,
  createPartner,
  createPerson,
@@ -31,12 +28,10 @@ import {
  createStack,
  createUserAdmin,
  deleteAssets,
  deleteDatabaseBackup,
  getAssetInfo,
  getConfig,
  getConfigDefaults,
  getQueuesLegacy,
  listDatabaseBackups,
  login,
  runQueueCommandLegacy,
  scanLibrary,
@@ -57,15 +52,11 @@ import {
import { BrowserContext } from '@playwright/test';
import { exec, spawn } from 'node:child_process';
import { createHash } from 'node:crypto';
import { createWriteStream, existsSync, mkdirSync, renameSync, rmSync, writeFileSync } from 'node:fs';
import { mkdtemp } from 'node:fs/promises';
import { existsSync, mkdirSync, renameSync, rmSync, writeFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { dirname, join, resolve } from 'node:path';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { dirname, resolve } from 'node:path';
import { setTimeout as setAsyncTimeout } from 'node:timers/promises';
import { promisify } from 'node:util';
import { createGzip } from 'node:zlib';
import pg from 'pg';
import { io, type Socket } from 'socket.io-client';
import { loginDto, signupDto } from 'src/fixtures';
@@ -93,9 +84,8 @@ export const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer $
export const asKeyAuth = (key: string) => ({ 'x-api-key': key });
export const immichCli = (args: string[]) =>
  executeCommand('pnpm', ['exec', 'immich', '-d', `/${tempDir}/immich/`, ...args], { cwd: '../cli' }).promise;
export const dockerExec = (args: string[]) =>
  executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', args.join(' ')]);
export const immichAdmin = (args: string[]) => dockerExec([`immich-admin ${args.join(' ')}`]);
export const immichAdmin = (args: string[]) =>
  executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', `immich-admin ${args.join(' ')}`]);
export const specialCharStrings = ["'", '"', ',', '{', '}', '*'];
export const TEN_TIMES = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

@@ -159,26 +149,12 @@ const onEvent = ({ event, id }: { event: EventType; id: string }) => {
};

export const utils = {
  connectDatabase: async () => {
    if (!client) {
      client = new pg.Client(dbUrl);
      client.on('end', () => (client = null));
      client.on('error', () => (client = null));
      await client.connect();
    }

    return client;
  },

  disconnectDatabase: async () => {
    if (client) {
      await client.end();
    }
  },

  resetDatabase: async (tables?: string[]) => {
    try {
      client = await utils.connectDatabase();
      if (!client) {
        client = new pg.Client(dbUrl);
        await client.connect();
      }

      tables = tables || [
        // TODO e2e test for deleting a stack, since it is quite complex
@@ -505,9 +481,6 @@ export const utils = {
  tagAssets: (accessToken: string, tagId: string, assetIds: string[]) =>
    tagAssets({ id: tagId, bulkIdsDto: { ids: assetIds } }, { headers: asBearerAuth(accessToken) }),

  createJob: async (accessToken: string, jobCreateDto: JobCreateDto) =>
    createJob({ jobCreateDto }, { headers: asBearerAuth(accessToken) }),

  queueCommand: async (accessToken: string, name: QueueName, queueCommandDto: QueueCommandDto) =>
    runQueueCommandLegacy({ name, queueCommandDto }, { headers: asBearerAuth(accessToken) }),

@@ -586,36 +559,6 @@ export const utils = {
    mkdirSync(`${testAssetDir}/temp`, { recursive: true });
  },

  createBackup: async (accessToken: string) => {
    await utils.createJob(accessToken, {
      name: ManualJobName.BackupDatabase,
    });

    return await utils.poll(
      () => request(app).get('/admin/database-backups').set('Authorization', `Bearer ${accessToken}`),
      ({ status, body }) => status === 200 && body.backups.length === 1,
      ({ body }) => body.backups[0],
    );
  },

  resetBackups: async (accessToken: string) => {
    const { backups } = await listDatabaseBackups({ headers: asBearerAuth(accessToken) });
    await deleteDatabaseBackup({ databaseBackupDeleteDto: { backups } }, { headers: asBearerAuth(accessToken) });
  },

  prepareTestBackup: async (generate: 'corrupted') => {
    const dir = await mkdtemp(join(tmpdir(), 'test-'));
    const fn = join(dir, 'file');

    const sql = Readable.from('IM CORRUPTED;');
    const gzip = createGzip();
    const writeStream = createWriteStream(fn);
    await pipeline(sql, gzip, writeStream);

    await executeCommand('docker', ['cp', fn, `immich-e2e-server:/data/backups/development-${generate}.sql.gz`])
      .promise;
  },

  resetAdminConfig: async (accessToken: string) => {
    const defaultConfig = await getConfigDefaults({ headers: asBearerAuth(accessToken) });
    await updateConfig({ systemConfigDto: defaultConfig }, { headers: asBearerAuth(accessToken) });
@@ -658,25 +601,6 @@ export const utils = {
    await utils.waitForQueueFinish(accessToken, 'sidecar');
    await utils.waitForQueueFinish(accessToken, 'metadataExtraction');
  },

  async poll<T>(cb: () => Promise<T>, validate: (value: T) => boolean, map?: (value: T) => any) {
    let timeout = 0;
    while (true) {
      try {
        const data = await cb();
        if (validate(data)) {
          return map ? map(data) : data;
        }
        timeout++;
        if (timeout >= 10) {
          throw 'Could not clean up test.';
        }
        await new Promise((resolve) => setTimeout(resolve, 5e2));
      } catch {
        // no-op
      }
    }
  },
};

utils.initSdk();
@@ -1,75 +0,0 @@
import { LoginResponseDto } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import { utils } from 'src/utils';

test.describe.configure({ mode: 'serial' });

test.describe('Database Backups', () => {
  let admin: LoginResponseDto;

  test.beforeAll(async () => {
    utils.initSdk();
    await utils.resetDatabase();
    admin = await utils.adminSetup();
  });

  test('restore a backup from settings', async ({ context, page }) => {
    test.setTimeout(60_000);

    await utils.resetBackups(admin.accessToken);
    await utils.createBackup(admin.accessToken);
    await utils.setAuthCookies(context, admin.accessToken);

    await page.goto('/admin/maintenance?isOpen=backups');
    await page.getByRole('button', { name: 'Restore', exact: true }).click();
    await page.locator('#bits-c2').getByRole('button', { name: 'Restore' }).click();

    await page.waitForURL('/maintenance?**');
    await page.waitForURL('/admin/maintenance**', { timeout: 60_000 });
  });

  test('handle backup restore failure', async ({ context, page }) => {
    test.setTimeout(60_000);

    await utils.resetBackups(admin.accessToken);
    await utils.prepareTestBackup('corrupted');
    await utils.setAuthCookies(context, admin.accessToken);

    await page.goto('/admin/maintenance?isOpen=backups');
    await page.getByRole('button', { name: 'Restore', exact: true }).click();
    await page.locator('#bits-c2').getByRole('button', { name: 'Restore' }).click();

    await page.waitForURL('/maintenance?**');
    await expect(page.getByText('IM CORRUPTED')).toBeVisible({ timeout: 60_000 });
    await page.getByRole('button', { name: 'End maintenance mode' }).click();
    await page.waitForURL('/admin/maintenance**');
  });

  test('restore a backup from onboarding', async ({ context, page }) => {
    test.setTimeout(60_000);

    await utils.resetBackups(admin.accessToken);
    await utils.createBackup(admin.accessToken);
    await utils.setAuthCookies(context, admin.accessToken);
    await utils.resetDatabase();

    await page.goto('/');
    await page.getByRole('button', { name: 'Restore from backup' }).click();

    try {
      await page.waitForURL('/maintenance**');
    } catch {
      // when chained with the rest of the tests
      // this navigation may fail..? not sure why...
      await page.goto('/maintenance');
      await page.waitForURL('/maintenance**');
    }

    await page.getByRole('button', { name: 'Next' }).click();
    await page.getByRole('button', { name: 'Restore', exact: true }).click();
    await page.locator('#bits-c2').getByRole('button', { name: 'Restore' }).click();

    await page.waitForURL('/maintenance?**');
    await page.waitForURL('/photos', { timeout: 60_000 });
  });
});
@@ -16,12 +16,12 @@ test.describe('Maintenance', () => {
  test('enter and exit maintenance mode', async ({ context, page }) => {
    await utils.setAuthCookies(context, admin.accessToken);

    await page.goto('/admin/maintenance');
    await page.getByRole('button', { name: 'Switch to maintenance mode' }).click();
    await page.goto('/admin/system-settings?isOpen=maintenance');
    await page.getByRole('button', { name: 'Start maintenance mode' }).click();

    await expect(page.getByText('Temporarily Unavailable')).toBeVisible({ timeout: 10_000 });
    await page.getByRole('button', { name: 'End maintenance mode' }).click();
    await page.waitForURL('**/admin/maintenance*', { timeout: 10_000 });
    await page.waitForURL('**/admin/system-settings*', { timeout: 10_000 });
  });

  test('maintenance shows no options to users until they authenticate', async ({ page }) => {
i18n/en.json
@@ -5,8 +5,10 @@
  "acknowledge": "Acknowledge",
  "action": "Action",
  "action_common_update": "Update",
  "action_description": "A set of action to perform on the filtered assets",
  "actions": "Actions",
  "active": "Active",
  "active_count": "Active: {count}",
  "activity": "Activity",
  "activity_changed": "Activity is {enabled, select, true {enabled} other {disabled}}",
  "add": "Add",
@@ -14,9 +16,13 @@
  "add_a_location": "Add a location",
  "add_a_name": "Add a name",
  "add_a_title": "Add a title",
  "add_action": "Add action",
  "add_action_description": "Click to add an action to perform",
  "add_birthday": "Add a birthday",
  "add_endpoint": "Add endpoint",
  "add_exclusion_pattern": "Add exclusion pattern",
  "add_filter": "Add filter",
  "add_filter_description": "Click to add a filter condition",
  "add_location": "Add location",
  "add_more_users": "Add more users",
  "add_partner": "Add partner",
@@ -35,6 +41,7 @@
  "add_to_shared_album": "Add to shared album",
  "add_upload_to_stack": "Add upload to stack",
  "add_url": "Add URL",
  "add_workflow_step": "Add workflow step",
  "added_to_archive": "Added to archive",
  "added_to_favorites": "Added to favorites",
  "added_to_favorites_count": "Added {count, number} to favorites",
@@ -77,7 +84,6 @@
  "exclusion_pattern_description": "Exclusion patterns lets you ignore files and folders when scanning your library. This is useful if you have folders that contain files you don't want to import, such as RAW files.",
  "export_config_as_json_description": "Download the current system config as a JSON file",
  "external_libraries_page_description": "Admin external library page",
  "external_library_management": "External Library Management",
  "face_detection": "Face detection",
  "face_detection_description": "Detect the faces in assets using machine learning. For videos, only the thumbnail is considered. \"Refresh\" (re-)processes all assets. \"Reset\" additionally clears all current face data. \"Missing\" queues assets that haven't been processed yet. Detected faces will be queued for Facial Recognition after Face Detection is complete, grouping them into existing or new people.",
  "facial_recognition_job_description": "Group detected faces into people. This step runs after Face Detection is complete. \"Reset\" (re-)clusters all faces. \"Missing\" queues faces that don't have a person assigned.",
@@ -111,10 +117,9 @@
  "job_not_concurrency_safe": "This job is not concurrency-safe.",
  "job_settings": "Job Settings",
  "job_settings_description": "Manage job concurrency",
  "job_status": "Job Status",
  "jobs_delayed": "{jobCount, plural, other {# delayed}}",
  "jobs_failed": "{jobCount, plural, other {# failed}}",
  "jobs_page_description": "Admin jobs page",
  "jobs_over_time": "Jobs over time",
  "library_created": "Created library: {library}",
  "library_deleted": "Library deleted",
  "library_details": "Library details",
@@ -182,19 +187,10 @@
  "machine_learning_smart_search_enabled": "Enable smart search",
  "machine_learning_smart_search_enabled_description": "If disabled, images will not be encoded for smart search.",
  "machine_learning_url_description": "The URL of the machine learning server. If more than one URL is provided, each server will be attempted one-at-a-time until one responds successfully, in order from first to last. Servers that don't respond will be temporarily ignored until they come back online.",
  "maintenance_delete_backup": "Delete Backup",
  "maintenance_delete_backup_description": "This file will be irrevocably deleted.",
  "maintenance_delete_error": "Failed to delete backup.",
  "maintenance_restore_backup": "Restore Backup",
  "maintenance_restore_backup_description": "Immich will be wiped and restored from the chosen backup. A backup will be created before continuing.",
  "maintenance_restore_database_backup": "Restore database backup",
  "maintenance_restore_database_backup_description": "Rollback to an earlier database state using a backup file",
  "maintenance_settings": "Maintenance",
  "maintenance_settings_description": "Put Immich into maintenance mode.",
  "maintenance_start": "Switch to maintenance mode",
  "maintenance_start": "Start maintenance mode",
  "maintenance_start_error": "Failed to start maintenance mode.",
  "maintenance_upload_backup": "Upload database backup file",
  "maintenance_upload_backup_error": "Could not upload backup, is it an .sql/.sql.gz file?",
  "manage_concurrency": "Manage Concurrency",
  "manage_concurrency_description": "Navigate to the jobs page to manage job concurrency",
  "manage_log_settings": "Manage log settings",
@@ -286,10 +282,14 @@
  "password_settings_description": "Manage password login settings",
  "paths_validated_successfully": "All paths validated successfully",
  "person_cleanup_job": "Person cleanup",
  "queue_details": "Queue Details",
  "queues": "Job Queues",
  "queues_page_description": "Admin job queues page",
  "quota_size_gib": "Quota Size (GiB)",
  "refreshing_all_libraries": "Refreshing all libraries",
  "registration": "Admin Registration",
  "registration_description": "Since you are the first user on the system, you will be assigned as the Admin and are responsible for administrative tasks, and additional users will be created by you.",
  "remove_failed_jobs": "Remove failed jobs",
  "require_password_change_on_login": "Require user to change password on first login",
  "reset_settings_to_default": "Reset settings to default",
  "reset_settings_to_recent_saved": "Reset settings to the recent saved settings",
@@ -473,6 +473,7 @@
  "album_remove_user": "Remove user?",
  "album_remove_user_confirmation": "Are you sure you want to remove {user}?",
  "album_search_not_found": "No albums found matching your search",
  "album_selected": "Album selected",
  "album_share_no_users": "Looks like you have shared this album with all users or you don't have any user to share with.",
  "album_summary": "Album summary",
  "album_updated": "Album updated",
@@ -494,6 +495,7 @@
  "albums_default_sort_order_description": "Initial asset sort order when creating new albums.",
  "albums_feature_description": "Collections of assets that can be shared with other users.",
  "albums_on_device_count": "Albums on device ({count})",
  "albums_selected": "{count, plural, one {# album selected} other {# albums selected}}",
  "all": "All",
  "all_albums": "All albums",
  "all_people": "All people",
@@ -530,10 +532,12 @@
  "archived_count": "{count, plural, other {Archived #}}",
  "are_these_the_same_person": "Are these the same person?",
  "are_you_sure_to_do_this": "Are you sure you want to do this?",
  "array_field_not_fully_supported": "Array fields require manual JSON editing",
  "asset_action_delete_err_read_only": "Cannot delete read only asset(s), skipping",
  "asset_action_share_err_offline": "Cannot fetch offline asset(s), skipping",
  "asset_added_to_album": "Added to album",
  "asset_adding_to_album": "Adding to album…",
  "asset_created": "Asset created",
  "asset_description_updated": "Asset description has been updated",
  "asset_filename_is_offline": "Asset {filename} is offline",
  "asset_has_unassigned_faces": "Asset has unassigned faces",
@@ -716,6 +720,8 @@
  "change_password_form_password_mismatch": "Passwords do not match",
  "change_password_form_reenter_new_password": "Re-enter New Password",
  "change_pin_code": "Change PIN code",
  "change_trigger": "Change trigger",
  "change_trigger_prompt": "Are you sure you want to change the trigger? This will remove all existing actions and filters.",
  "change_your_password": "Change your password",
  "changed_visibility_successfully": "Changed visibility successfully",
  "charging": "Charging",
@@ -791,6 +797,7 @@
  "create_album": "Create album",
  "create_album_page_untitled": "Untitled",
  "create_api_key": "Create API key",
  "create_first_workflow": "Create first workflow",
  "create_library": "Create Library",
  "create_link": "Create link",
  "create_link_to_share": "Create link to share",
@@ -805,14 +812,9 @@
  "create_tag": "Create tag",
  "create_tag_description": "Create a new tag. For nested tags, please enter the full path of the tag including forward slashes.",
  "create_user": "Create user",
  "create_workflow": "Create workflow",
  "created": "Created",
  "created_at": "Created",
  "created_day_ago": "Created 1 day ago",
  "created_days_ago": "Created {count} days ago",
  "created_hour_ago": "Created 1 hour ago",
  "created_hours_ago": "Created {count} hours ago",
  "created_minute_ago": "Created 1 minute ago",
  "created_minutes_ago": "Created {count} minutes ago",
  "creating_linked_albums": "Creating linked albums...",
  "crop": "Crop",
  "curated_object_page_title": "Things",
@@ -877,6 +879,7 @@
  "deselect_all": "Deselect All",
  "details": "Details",
  "direction": "Direction",
  "disable": "Disable",
  "disabled": "Disabled",
  "disallow_edits": "Disallow edits",
  "discord": "Discord",
@@ -939,11 +942,13 @@
  "edit_tag": "Edit tag",
  "edit_title": "Edit Title",
  "edit_user": "Edit user",
  "edit_workflow": "Edit workflow",
  "editor": "Editor",
  "editor_close_without_save_prompt": "The changes will not be saved",
  "editor_close_without_save_title": "Close editor?",
  "editor_crop_tool_h2_aspect_ratios": "Aspect ratios",
  "editor_crop_tool_h2_rotation": "Rotation",
  "editor_mode": "Editor mode",
  "email": "Email",
  "email_notifications": "Email notifications",
  "empty_folder": "This folder is empty",
@@ -1024,6 +1029,7 @@
  "unable_to_complete_oauth_login": "Unable to complete OAuth login",
  "unable_to_connect": "Unable to connect",
  "unable_to_copy_to_clipboard": "Cannot copy to clipboard, make sure you are accessing the page through https",
  "unable_to_create": "Unable to create workflow",
  "unable_to_create_admin_account": "Unable to create admin account",
  "unable_to_create_api_key": "Unable to create a new API Key",
  "unable_to_create_library": "Unable to create library",
@@ -1034,6 +1040,7 @@
  "unable_to_delete_exclusion_pattern": "Unable to delete exclusion pattern",
  "unable_to_delete_shared_link": "Unable to delete shared link",
  "unable_to_delete_user": "Unable to delete user",
  "unable_to_delete_workflow": "Unable to delete workflow",
  "unable_to_download_files": "Unable to download files",
  "unable_to_edit_exclusion_pattern": "Unable to edit exclusion pattern",
  "unable_to_empty_trash": "Unable to empty trash",
@@ -1084,6 +1091,7 @@
  "unable_to_update_settings": "Unable to update settings",
  "unable_to_update_timeline_display_status": "Unable to update timeline display status",
  "unable_to_update_user": "Unable to update user",
  "unable_to_update_workflow": "Unable to update workflow",
  "unable_to_upload_file": "Unable to upload file"
  },
  "exclusion_pattern": "Exclusion pattern",
@@ -1117,6 +1125,7 @@
  "external_network_sheet_info": "When not on the preferred Wi-Fi network, the app will connect to the server through the first of the below URLs it can reach, starting from top to bottom",
  "face_unassigned": "Unassigned",
  "failed": "Failed",
  "failed_count": "Failed: {count}",
  "failed_to_authenticate": "Failed to authenticate",
  "failed_to_load_assets": "Failed to load assets",
  "failed_to_load_folder": "Failed to load folder",
@@ -1135,8 +1144,10 @@
  "filename": "Filename",
  "filetype": "Filetype",
  "filter": "Filter",
  "filter_description": "Conditions to filter the target assets",
  "filter_people": "Filter people",
  "filter_places": "Filter places",
  "filters": "Filters",
  "find_them_fast": "Find them fast by name with search",
  "first": "First",
  "fix_incorrect_match": "Fix incorrect match",
@@ -1152,6 +1163,7 @@
  "general": "General",
  "geolocation_instruction_location": "Click on an asset with GPS coordinates to use its location, or select a location directly from the map",
  "get_help": "Get Help",
  "get_people_error": "Error getting people",
  "get_wifiname_error": "Could not get Wi-Fi name. Make sure you have granted the necessary permissions and are connected to a Wi-Fi network",
  "getting_started": "Getting Started",
  "go_back": "Go back",
@@ -1183,6 +1195,7 @@
  "hide_named_person": "Hide person {name}",
  "hide_password": "Hide password",
  "hide_person": "Hide person",
  "hide_schema": "Hide schema",
  "hide_text_recognition": "Hide text recognition",
  "hide_unnamed_people": "Hide unnamed people",
  "home_page_add_to_album_conflicts": "Added {added} assets to album {album}. {failed} assets are already in the album.",
@@ -1255,6 +1268,8 @@
  "ios_debug_info_processing_ran_at": "Processing ran {dateTime}",
  "items_count": "{count, plural, one {# item} other {# items}}",
  "jobs": "Jobs",
  "json_editor": "JSON editor",
  "json_error": "JSON error",
  "keep": "Keep",
  "keep_all": "Keep All",
  "keep_this_delete_others": "Keep this, delete others",
@@ -1350,26 +1365,10 @@
  "loop_videos_description": "Enable to automatically loop a video in the detail viewer.",
  "main_branch_warning": "You're using a development version; we strongly recommend using a release version!",
  "main_menu": "Main menu",
  "maintenance_action_restore": "Restoring Database",
  "maintenance_description": "Immich has been put into <link>maintenance mode</link>.",
  "maintenance_end": "End maintenance mode",
  "maintenance_end_error": "Failed to end maintenance mode.",
  "maintenance_logged_in_as": "Currently logged in as {user}",
  "maintenance_restore_from_backup": "Restore From Backup",
  "maintenance_restore_library": "Restore Your Library",
  "maintenance_restore_library_confirm": "If this looks correct, continue to restoring a backup!",
  "maintenance_restore_library_description": "Restoring Database",
  "maintenance_restore_library_folder_has_files": "{folder} has {count} folder(s)",
  "maintenance_restore_library_folder_no_files": "{folder} is missing files!",
  "maintenance_restore_library_folder_pass": "readable and writable",
  "maintenance_restore_library_folder_read_fail": "not readable",
  "maintenance_restore_library_folder_write_fail": "not writable",
  "maintenance_restore_library_hint_missing_files": "You may be missing important files",
  "maintenance_restore_library_hint_regenerate_later": "You can regenerate these later in settings",
  "maintenance_restore_library_hint_storage_template_missing_files": "Using storage template? You may be missing files",
  "maintenance_restore_library_loading": "Loading integrity checks and heuristics…",
  "maintenance_task_backup": "Creating a backup of the existing database…",
  "maintenance_task_restore": "Restoring the chosen backup…",
  "maintenance_title": "Temporarily Unavailable",
  "make": "Make",
  "manage_geolocation": "Manage location",
@@ -1439,11 +1438,13 @@
  "monthly_title_text_date_format": "MMMM y",
  "more": "More",
  "move": "Move",
  "move_down": "Move down",
  "move_off_locked_folder": "Move out of locked folder",
  "move_to": "Move to",
  "move_to_lock_folder_action_prompt": "{count} added to the locked folder",
  "move_to_locked_folder": "Move to locked folder",
  "move_to_locked_folder_confirmation": "These photos and video will be removed from all albums, and only viewable from the locked folder",
  "move_up": "Move up",
  "moved_to_archive": "Moved {count, plural, one {# asset} other {# assets}} to archive",
  "moved_to_library": "Moved {count, plural, one {# asset} other {# assets}} to library",
  "moved_to_trash": "Moved to trash",
@@ -1453,6 +1454,7 @@
  "my_albums": "My albums",
  "name": "Name",
  "name_or_nickname": "Name or nickname",
  "name_required": "Name is required",
  "navigate": "Navigate",
  "navigate_to_time": "Navigate to Time",
  "network_requirement_photos_upload": "Use cellular data to backup photos",
@@ -1477,6 +1479,7 @@
  "next": "Next",
  "next_memory": "Next memory",
  "no": "No",
  "no_actions_added": "No actions added yet",
  "no_albums_message": "Create an album to organize your photos and videos",
  "no_albums_with_name_yet": "It looks like you do not have any albums with this name yet.",
  "no_albums_yet": "It looks like you do not have any albums yet.",
@@ -1486,11 +1489,13 @@
  "no_cast_devices_found": "No cast devices found",
  "no_checksum_local": "No checksum available - cannot fetch local assets",
  "no_checksum_remote": "No checksum available - cannot fetch remote asset",
  "no_configuration_needed": "No configuration needed",
  "no_devices": "No authorized devices",
  "no_duplicates_found": "No duplicates were found.",
  "no_exif_info_available": "No exif info available",
  "no_explore_results_message": "Upload more photos to explore your collection.",
  "no_favorites_message": "Add favorites to quickly find your best pictures and videos",
  "no_filters_added": "No filters added yet",
  "no_libraries_message": "Create an external library to view your photos and videos",
  "no_local_assets_found": "No local assets found with this checksum",
  "no_location_set": "No location set",
@@ -1586,6 +1591,7 @@
  "people": "People",
  "people_edits_count": "Edited {count, plural, one {# person} other {# people}}",
  "people_feature_description": "Browsing photos and videos grouped by people",
  "people_selected": "{count, plural, one {# person selected} other {# people selected}}",
  "people_sidebar_description": "Display a link to People in the sidebar",
  "permanent_deletion_warning": "Permanent deletion warning",
  "permanent_deletion_warning_setting_description": "Show a warning when permanently deleting assets",
@@ -1610,6 +1616,8 @@
  "person_age_years": "{years, plural, other {# years}} old",
  "person_birthdate": "Born on {date}",
  "person_hidden": "{name}{hidden, select, true { (hidden)} other {}}",
  "person_recognized": "Person recognized",
  "person_selected": "Person selected",
  "photo_shared_all_users": "Looks like you shared your photos with all users or you don't have any user to share with.",
  "photos": "Photos",
  "photos_and_videos": "Photos & Videos",
@@ -1859,17 +1867,22 @@
  "second": "Second",
  "see_all_people": "See all people",
  "select": "Select",
  "select_album": "Select album",
  "select_album_cover": "Select album cover",
  "select_albums": "Select albums",
  "select_all": "Select all",
  "select_all_duplicates": "Select all duplicates",
  "select_all_in": "Select all in {group}",
  "select_avatar_color": "Select avatar color",
  "select_count": "{count, plural, one {Select #} other {Select #}}",
  "select_face": "Select face",
  "select_featured_photo": "Select featured photo",
  "select_from_computer": "Select from computer",
  "select_keep_all": "Select keep all",
  "select_library_owner": "Select library owner",
  "select_new_face": "Select new face",
  "select_people": "Select people",
  "select_person": "Select person",
  "select_person_to_tag": "Select a person to tag",
  "select_photos": "Select photos",
  "select_trash_all": "Select trash all",
@@ -2005,6 +2018,7 @@
  "show_password": "Show password",
  "show_person_options": "Show person options",
  "show_progress_bar": "Show Progress Bar",
  "show_schema": "Show schema",
  "show_search_options": "Show search options",
  "show_shared_links": "Show shared links",
  "show_slideshow_transition": "Show slideshow transition",
@@ -2132,6 +2146,13 @@
  "trash_page_select_assets_btn": "Select assets",
  "trash_page_title": "Trash ({count})",
  "trashed_items_will_be_permanently_deleted_after": "Trashed items will be permanently deleted after {days, plural, one {# day} other {# days}}.",
  "trigger": "Trigger",
  "trigger_asset_uploaded": "Asset Uploaded",
  "trigger_asset_uploaded_description": "Triggered when a new asset is uploaded",
  "trigger_description": "An event that kick off the workflow",
  "trigger_person_recognized": "Person Recognized",
  "trigger_person_recognized_description": "Triggered when a person is detected",
  "trigger_type": "Trigger type",
  "troubleshoot": "Troubleshoot",
  "type": "Type",
  "unable_to_change_pin_code": "Unable to change PIN code",
@@ -2162,7 +2183,9 @@
  "unstack": "Un-stack",
  "unstack_action_prompt": "{count} unstacked",
  "unstacked_assets_count": "Un-stacked {count, plural, one {# asset} other {# assets}}",
  "unsupported_field_type": "Unsupported field type",
  "untagged": "Untagged",
  "untitled_workflow": "Untitled workflow",
  "up_next": "Up next",
  "update_location_action_prompt": "Update the location of {count} selected assets with:",
  "updated_at": "Updated",
@@ -2208,6 +2231,7 @@
  "utilities": "Utilities",
  "validate": "Validate",
  "validate_endpoint_error": "Please enter a valid URL",
  "validation_error": "Validation error",
  "variables": "Variables",
  "version": "Version",
  "version_announcement_closing": "Your friend, Alex",
@@ -2239,13 +2263,28 @@
  "viewer_stack_use_as_main_asset": "Use as Main Asset",
  "viewer_unstack": "Un-Stack",
  "visibility_changed": "Visibility changed for {count, plural, one {# person} other {# people}}",
  "visual": "Visual",
  "visual_builder": "Visual builder",
  "waiting": "Waiting",
  "waiting_count": "Waiting: {count}",
  "warning": "Warning",
  "week": "Week",
  "welcome": "Welcome",
  "welcome_to_immich": "Welcome to Immich",
  "wifi_name": "Wi-Fi Name",
  "workflow": "Workflow",
  "workflow_delete_prompt": "Are you sure you want to delete this workflow?",
  "workflow_deleted": "Workflow deleted",
  "workflow_description": "Workflow description",
  "workflow_info": "Workflow info",
  "workflow_json": "Workflow JSON",
  "workflow_json_help": "Edit the workflow configuration in JSON format. Changes will sync to the visual builder.",
  "workflow_name": "Workflow name",
  "workflow_navigation_prompt": "Are you sure you want to leave without saving your changes?",
  "workflow_summary": "Workflow summary",
  "workflow_update_success": "Workflow updated successfully",
  "workflow_updated": "Workflow updated",
  "workflows": "Workflows",
  "workflows_help_text": "Workflows automate actions on your assets based on triggers and filters",
  "wrong_pin_code": "Wrong PIN code",
  "year": "Year",
  "years_ago": "{years, plural, one {# year} other {# years}} ago",
@@ -89,7 +89,10 @@ data class PlatformAsset (
  val height: Long? = null,
  val durationInSeconds: Long,
  val orientation: Long,
  val isFavorite: Boolean
  val isFavorite: Boolean,
  val adjustmentTime: Long? = null,
  val latitude: Double? = null,
  val longitude: Double? = null
)
 {
  companion object {
@@ -104,7 +107,10 @@ data class PlatformAsset (
      val durationInSeconds = pigeonVar_list[7] as Long
      val orientation = pigeonVar_list[8] as Long
      val isFavorite = pigeonVar_list[9] as Boolean
      return PlatformAsset(id, name, type, createdAt, updatedAt, width, height, durationInSeconds, orientation, isFavorite)
      val adjustmentTime = pigeonVar_list[10] as Long?
      val latitude = pigeonVar_list[11] as Double?
      val longitude = pigeonVar_list[12] as Double?
      return PlatformAsset(id, name, type, createdAt, updatedAt, width, height, durationInSeconds, orientation, isFavorite, adjustmentTime, latitude, longitude)
    }
  }
  fun toList(): List<Any?> {
@@ -119,6 +125,9 @@
      durationInSeconds,
      orientation,
      isFavorite,
      adjustmentTime,
      latitude,
      longitude,
    )
  }
  override fun equals(other: Any?): Boolean {
@@ -4,7 +4,6 @@ import android.annotation.SuppressLint
import android.content.ContentUris
import android.content.Context
import android.database.Cursor
import android.net.Uri
import android.os.Bundle
import android.provider.MediaStore
import android.util.Base64
mobile/drift_schemas/main/drift_schema_v14.json (generated)
File diff suppressed because one or more lines are too long
@@ -140,6 +140,9 @@ struct PlatformAsset: Hashable {
  var durationInSeconds: Int64
  var orientation: Int64
  var isFavorite: Bool
  var adjustmentTime: Int64? = nil
  var latitude: Double? = nil
  var longitude: Double? = nil

  // swift-format-ignore: AlwaysUseLowerCamelCase
@@ -154,6 +157,9 @@ struct PlatformAsset: Hashable {
    let durationInSeconds = pigeonVar_list[7] as! Int64
    let orientation = pigeonVar_list[8] as! Int64
    let isFavorite = pigeonVar_list[9] as! Bool
    let adjustmentTime: Int64? = nilOrValue(pigeonVar_list[10])
    let latitude: Double? = nilOrValue(pigeonVar_list[11])
    let longitude: Double? = nilOrValue(pigeonVar_list[12])

    return PlatformAsset(
      id: id,
@@ -165,7 +171,10 @@ struct PlatformAsset: Hashable {
      height: height,
      durationInSeconds: durationInSeconds,
      orientation: orientation,
      isFavorite: isFavorite
      isFavorite: isFavorite,
      adjustmentTime: adjustmentTime,
      latitude: latitude,
      longitude: longitude
    )
  }
  func toList() -> [Any?] {
@@ -180,6 +189,9 @@ struct PlatformAsset: Hashable {
      durationInSeconds,
      orientation,
      isFavorite,
      adjustmentTime,
      latitude,
      longitude,
    ]
  }
  static func == (lhs: PlatformAsset, rhs: PlatformAsset) -> Bool {
@@ -12,7 +12,10 @@ extension PHAsset {
      height: Int64(pixelHeight),
      durationInSeconds: Int64(duration),
      orientation: 0,
      isFavorite: isFavorite
      isFavorite: isFavorite,
      adjustmentTime: adjustmentTimestamp,
      latitude: location?.coordinate.latitude,
      longitude: location?.coordinate.longitude
    )
  }

@@ -23,6 +26,13 @@ extension PHAsset {
  var filename: String? {
    return value(forKey: "filename") as? String
  }

  var adjustmentTimestamp: Int64? {
    if let date = value(forKey: "adjustmentTimestamp") as? Date {
      return Int64(date.timeIntervalSince1970)
    }
    return nil
  }

  // This method is expected to be slow as it goes through the asset resources to fetch the originalFilename
  var originalFilename: String? {
@@ -5,6 +5,10 @@ class LocalAsset extends BaseAsset {
  final String? remoteAssetId;
  final int orientation;

  final DateTime? adjustmentTime;
  final double? latitude;
  final double? longitude;

  const LocalAsset({
    required this.id,
    String? remoteId,
@@ -19,6 +23,9 @@ class LocalAsset extends BaseAsset {
    super.isFavorite = false,
    super.livePhotoVideoId,
    this.orientation = 0,
    this.adjustmentTime,
    this.latitude,
    this.longitude,
  }) : remoteAssetId = remoteId;

  @override
@@ -33,6 +40,8 @@ class LocalAsset extends BaseAsset {
  @override
  String get heroTag => '${id}_${remoteId ?? checksum}';

  bool get hasCoordinates => latitude != null && longitude != null && latitude != 0 && longitude != 0;

  @override
  String toString() {
    return '''LocalAsset {
@@ -47,6 +56,9 @@ class LocalAsset extends BaseAsset {
  remoteId: ${remoteId ?? "<NA>"}
  isFavorite: $isFavorite,
  orientation: $orientation,
  adjustmentTime: $adjustmentTime,
  latitude: ${latitude ?? "<NA>"},
  longitude: ${longitude ?? "<NA>"},
}''';
  }

@@ -55,11 +67,23 @@ class LocalAsset extends BaseAsset {
  bool operator ==(Object other) {
    if (other is! LocalAsset) return false;
    if (identical(this, other)) return true;
    return super == other && id == other.id && orientation == other.orientation;
    return super == other &&
        id == other.id &&
        orientation == other.orientation &&
        adjustmentTime == other.adjustmentTime &&
        latitude == other.latitude &&
        longitude == other.longitude;
  }

  @override
  int get hashCode => super.hashCode ^ id.hashCode ^ remoteId.hashCode ^ orientation.hashCode;
  int get hashCode =>
      super.hashCode ^
      id.hashCode ^
      remoteId.hashCode ^
      orientation.hashCode ^
      adjustmentTime.hashCode ^
      latitude.hashCode ^
      longitude.hashCode;

  LocalAsset copyWith({
    String? id,
@@ -74,6 +98,9 @@ class LocalAsset extends BaseAsset {
    int? durationInSeconds,
    bool? isFavorite,
    int? orientation,
    DateTime? adjustmentTime,
    double? latitude,
    double? longitude,
  }) {
    return LocalAsset(
      id: id ?? this.id,
@@ -88,6 +115,9 @@ class LocalAsset extends BaseAsset {
      durationInSeconds: durationInSeconds ?? this.durationInSeconds,
      isFavorite: isFavorite ?? this.isFavorite,
      orientation: orientation ?? this.orientation,
      adjustmentTime: adjustmentTime ?? this.adjustmentTime,
      latitude: latitude ?? this.latitude,
      longitude: longitude ?? this.longitude,
    );
  }
}
@@ -37,7 +37,7 @@ class ExifInfo {

String get fNumber => f == null ? "" : f!.toStringAsFixed(1);

String get focalLength => mm == null ? "" : mm!.toStringAsFixed(1);
String get focalLength => mm == null ? "" : mm!.toStringAsFixed(3);

const ExifInfo({
this.assetId,
@@ -286,11 +286,23 @@ class LocalSyncService {
}

bool _assetsEqual(LocalAsset a, LocalAsset b) {
return a.updatedAt.isAtSameMomentAs(b.updatedAt) &&
if (CurrentPlatform.isAndroid) {
return a.updatedAt.isAtSameMomentAs(b.updatedAt) &&
a.createdAt.isAtSameMomentAs(b.createdAt) &&
a.width == b.width &&
a.height == b.height &&
a.durationInSeconds == b.durationInSeconds;
}

final firstAdjustment = a.adjustmentTime?.millisecondsSinceEpoch ?? 0;
final secondAdjustment = b.adjustmentTime?.millisecondsSinceEpoch ?? 0;
return firstAdjustment == secondAdjustment &&
a.createdAt.isAtSameMomentAs(b.createdAt) &&
a.width == b.width &&
a.height == b.height &&
a.durationInSeconds == b.durationInSeconds;
a.durationInSeconds == b.durationInSeconds &&
a.latitude == b.latitude &&
a.longitude == b.longitude;
}

bool _albumsEqual(LocalAlbum a, LocalAlbum b) {
@@ -376,5 +388,8 @@ extension PlatformToLocalAsset on PlatformAsset {
durationInSeconds: durationInSeconds,
isFavorite: isFavorite,
orientation: orientation,
adjustmentTime: tryFromSecondsSinceEpoch(adjustmentTime, isUtc: true),
latitude: latitude,
longitude: longitude,
);
}
@@ -166,5 +166,6 @@ extension RemoteExifEntityDataDomainEx on RemoteExifEntityData {
mm: focalLength?.toDouble(),
lens: lens,
isFlipped: ExifDtoConverter.isOrientationFlipped(orientation),
exposureSeconds: ExifDtoConverter.exposureTimeToSeconds(exposureTime),
);
}
@@ -16,6 +16,12 @@ class LocalAssetEntity extends Table with DriftDefaultsMixin, AssetEntityMixin {

IntColumn get orientation => integer().withDefault(const Constant(0))();

DateTimeColumn get adjustmentTime => dateTime().nullable()();

RealColumn get latitude => real().nullable()();

RealColumn get longitude => real().nullable()();

@override
Set<Column> get primaryKey => {id};
}
@@ -34,5 +40,8 @@ extension LocalAssetEntityDataDomainExtension on LocalAssetEntityData {
width: width,
remoteId: remoteId,
orientation: orientation,
adjustmentTime: adjustmentTime,
latitude: latitude,
longitude: longitude,
);
}
@@ -21,6 +21,9 @@ typedef $$LocalAssetEntityTableCreateCompanionBuilder =
|
||||
i0.Value<String?> checksum,
|
||||
i0.Value<bool> isFavorite,
|
||||
i0.Value<int> orientation,
|
||||
i0.Value<DateTime?> adjustmentTime,
|
||||
i0.Value<double?> latitude,
|
||||
i0.Value<double?> longitude,
|
||||
});
|
||||
typedef $$LocalAssetEntityTableUpdateCompanionBuilder =
|
||||
i1.LocalAssetEntityCompanion Function({
|
||||
@@ -35,6 +38,9 @@ typedef $$LocalAssetEntityTableUpdateCompanionBuilder =
|
||||
i0.Value<String?> checksum,
|
||||
i0.Value<bool> isFavorite,
|
||||
i0.Value<int> orientation,
|
||||
i0.Value<DateTime?> adjustmentTime,
|
||||
i0.Value<double?> latitude,
|
||||
i0.Value<double?> longitude,
|
||||
});
|
||||
|
||||
class $$LocalAssetEntityTableFilterComposer
|
||||
@@ -101,6 +107,21 @@ class $$LocalAssetEntityTableFilterComposer
|
||||
column: $table.orientation,
|
||||
builder: (column) => i0.ColumnFilters(column),
|
||||
);
|
||||
|
||||
i0.ColumnFilters<DateTime> get adjustmentTime => $composableBuilder(
|
||||
column: $table.adjustmentTime,
|
||||
builder: (column) => i0.ColumnFilters(column),
|
||||
);
|
||||
|
||||
i0.ColumnFilters<double> get latitude => $composableBuilder(
|
||||
column: $table.latitude,
|
||||
builder: (column) => i0.ColumnFilters(column),
|
||||
);
|
||||
|
||||
i0.ColumnFilters<double> get longitude => $composableBuilder(
|
||||
column: $table.longitude,
|
||||
builder: (column) => i0.ColumnFilters(column),
|
||||
);
|
||||
}
|
||||
|
||||
class $$LocalAssetEntityTableOrderingComposer
|
||||
@@ -166,6 +187,21 @@ class $$LocalAssetEntityTableOrderingComposer
|
||||
column: $table.orientation,
|
||||
builder: (column) => i0.ColumnOrderings(column),
|
||||
);
|
||||
|
||||
i0.ColumnOrderings<DateTime> get adjustmentTime => $composableBuilder(
|
||||
column: $table.adjustmentTime,
|
||||
builder: (column) => i0.ColumnOrderings(column),
|
||||
);
|
||||
|
||||
i0.ColumnOrderings<double> get latitude => $composableBuilder(
|
||||
column: $table.latitude,
|
||||
builder: (column) => i0.ColumnOrderings(column),
|
||||
);
|
||||
|
||||
i0.ColumnOrderings<double> get longitude => $composableBuilder(
|
||||
column: $table.longitude,
|
||||
builder: (column) => i0.ColumnOrderings(column),
|
||||
);
|
||||
}
|
||||
|
||||
class $$LocalAssetEntityTableAnnotationComposer
|
||||
@@ -215,6 +251,17 @@ class $$LocalAssetEntityTableAnnotationComposer
|
||||
column: $table.orientation,
|
||||
builder: (column) => column,
|
||||
);
|
||||
|
||||
i0.GeneratedColumn<DateTime> get adjustmentTime => $composableBuilder(
|
||||
column: $table.adjustmentTime,
|
||||
builder: (column) => column,
|
||||
);
|
||||
|
||||
i0.GeneratedColumn<double> get latitude =>
|
||||
$composableBuilder(column: $table.latitude, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<double> get longitude =>
|
||||
$composableBuilder(column: $table.longitude, builder: (column) => column);
|
||||
}
|
||||
|
||||
class $$LocalAssetEntityTableTableManager
|
||||
@@ -268,6 +315,9 @@ class $$LocalAssetEntityTableTableManager
|
||||
i0.Value<String?> checksum = const i0.Value.absent(),
|
||||
i0.Value<bool> isFavorite = const i0.Value.absent(),
|
||||
i0.Value<int> orientation = const i0.Value.absent(),
|
||||
i0.Value<DateTime?> adjustmentTime = const i0.Value.absent(),
|
||||
i0.Value<double?> latitude = const i0.Value.absent(),
|
||||
i0.Value<double?> longitude = const i0.Value.absent(),
|
||||
}) => i1.LocalAssetEntityCompanion(
|
||||
name: name,
|
||||
type: type,
|
||||
@@ -280,6 +330,9 @@ class $$LocalAssetEntityTableTableManager
|
||||
checksum: checksum,
|
||||
isFavorite: isFavorite,
|
||||
orientation: orientation,
|
||||
adjustmentTime: adjustmentTime,
|
||||
latitude: latitude,
|
||||
longitude: longitude,
|
||||
),
|
||||
createCompanionCallback:
|
||||
({
|
||||
@@ -294,6 +347,9 @@ class $$LocalAssetEntityTableTableManager
|
||||
i0.Value<String?> checksum = const i0.Value.absent(),
|
||||
i0.Value<bool> isFavorite = const i0.Value.absent(),
|
||||
i0.Value<int> orientation = const i0.Value.absent(),
|
||||
i0.Value<DateTime?> adjustmentTime = const i0.Value.absent(),
|
||||
i0.Value<double?> latitude = const i0.Value.absent(),
|
||||
i0.Value<double?> longitude = const i0.Value.absent(),
|
||||
}) => i1.LocalAssetEntityCompanion.insert(
|
||||
name: name,
|
||||
type: type,
|
||||
@@ -306,6 +362,9 @@ class $$LocalAssetEntityTableTableManager
|
||||
checksum: checksum,
|
||||
isFavorite: isFavorite,
|
||||
orientation: orientation,
|
||||
adjustmentTime: adjustmentTime,
|
||||
latitude: latitude,
|
||||
longitude: longitude,
|
||||
),
|
||||
withReferenceMapper: (p0) => p0
|
||||
.map((e) => (e.readTable(table), i0.BaseReferences(db, table, e)))
|
||||
@@ -473,6 +532,39 @@ class $LocalAssetEntityTable extends i3.LocalAssetEntity
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: const i4.Constant(0),
|
||||
);
|
||||
static const i0.VerificationMeta _adjustmentTimeMeta =
|
||||
const i0.VerificationMeta('adjustmentTime');
|
||||
@override
|
||||
late final i0.GeneratedColumn<DateTime> adjustmentTime =
|
||||
i0.GeneratedColumn<DateTime>(
|
||||
'adjustment_time',
|
||||
aliasedName,
|
||||
true,
|
||||
type: i0.DriftSqlType.dateTime,
|
||||
requiredDuringInsert: false,
|
||||
);
|
||||
static const i0.VerificationMeta _latitudeMeta = const i0.VerificationMeta(
|
||||
'latitude',
|
||||
);
|
||||
@override
|
||||
late final i0.GeneratedColumn<double> latitude = i0.GeneratedColumn<double>(
|
||||
'latitude',
|
||||
aliasedName,
|
||||
true,
|
||||
type: i0.DriftSqlType.double,
|
||||
requiredDuringInsert: false,
|
||||
);
|
||||
static const i0.VerificationMeta _longitudeMeta = const i0.VerificationMeta(
|
||||
'longitude',
|
||||
);
|
||||
@override
|
||||
late final i0.GeneratedColumn<double> longitude = i0.GeneratedColumn<double>(
|
||||
'longitude',
|
||||
aliasedName,
|
||||
true,
|
||||
type: i0.DriftSqlType.double,
|
||||
requiredDuringInsert: false,
|
||||
);
|
||||
@override
|
||||
List<i0.GeneratedColumn> get $columns => [
|
||||
name,
|
||||
@@ -486,6 +578,9 @@ class $LocalAssetEntityTable extends i3.LocalAssetEntity
|
||||
checksum,
|
||||
isFavorite,
|
||||
orientation,
|
||||
adjustmentTime,
|
||||
latitude,
|
||||
longitude,
|
||||
];
|
||||
@override
|
||||
String get aliasedName => _alias ?? actualTableName;
|
||||
@@ -566,6 +661,27 @@ class $LocalAssetEntityTable extends i3.LocalAssetEntity
|
||||
),
|
||||
);
|
||||
}
|
||||
if (data.containsKey('adjustment_time')) {
|
||||
context.handle(
|
||||
_adjustmentTimeMeta,
|
||||
adjustmentTime.isAcceptableOrUnknown(
|
||||
data['adjustment_time']!,
|
||||
_adjustmentTimeMeta,
|
||||
),
|
||||
);
|
||||
}
|
||||
if (data.containsKey('latitude')) {
|
||||
context.handle(
|
||||
_latitudeMeta,
|
||||
latitude.isAcceptableOrUnknown(data['latitude']!, _latitudeMeta),
|
||||
);
|
||||
}
|
||||
if (data.containsKey('longitude')) {
|
||||
context.handle(
|
||||
_longitudeMeta,
|
||||
longitude.isAcceptableOrUnknown(data['longitude']!, _longitudeMeta),
|
||||
);
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
@@ -624,6 +740,18 @@ class $LocalAssetEntityTable extends i3.LocalAssetEntity
|
||||
i0.DriftSqlType.int,
|
||||
data['${effectivePrefix}orientation'],
|
||||
)!,
|
||||
adjustmentTime: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.dateTime,
|
||||
data['${effectivePrefix}adjustment_time'],
|
||||
),
|
||||
latitude: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.double,
|
||||
data['${effectivePrefix}latitude'],
|
||||
),
|
||||
longitude: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.double,
|
||||
data['${effectivePrefix}longitude'],
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -653,6 +781,9 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
final String? checksum;
|
||||
final bool isFavorite;
|
||||
final int orientation;
|
||||
final DateTime? adjustmentTime;
|
||||
final double? latitude;
|
||||
final double? longitude;
|
||||
const LocalAssetEntityData({
|
||||
required this.name,
|
||||
required this.type,
|
||||
@@ -665,6 +796,9 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
this.checksum,
|
||||
required this.isFavorite,
|
||||
required this.orientation,
|
||||
this.adjustmentTime,
|
||||
this.latitude,
|
||||
this.longitude,
|
||||
});
|
||||
@override
|
||||
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
|
||||
@@ -692,6 +826,15 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
}
|
||||
map['is_favorite'] = i0.Variable<bool>(isFavorite);
|
||||
map['orientation'] = i0.Variable<int>(orientation);
|
||||
if (!nullToAbsent || adjustmentTime != null) {
|
||||
map['adjustment_time'] = i0.Variable<DateTime>(adjustmentTime);
|
||||
}
|
||||
if (!nullToAbsent || latitude != null) {
|
||||
map['latitude'] = i0.Variable<double>(latitude);
|
||||
}
|
||||
if (!nullToAbsent || longitude != null) {
|
||||
map['longitude'] = i0.Variable<double>(longitude);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@@ -714,6 +857,9 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
checksum: serializer.fromJson<String?>(json['checksum']),
|
||||
isFavorite: serializer.fromJson<bool>(json['isFavorite']),
|
||||
orientation: serializer.fromJson<int>(json['orientation']),
|
||||
adjustmentTime: serializer.fromJson<DateTime?>(json['adjustmentTime']),
|
||||
latitude: serializer.fromJson<double?>(json['latitude']),
|
||||
longitude: serializer.fromJson<double?>(json['longitude']),
|
||||
);
|
||||
}
|
||||
@override
|
||||
@@ -733,6 +879,9 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
'checksum': serializer.toJson<String?>(checksum),
|
||||
'isFavorite': serializer.toJson<bool>(isFavorite),
|
||||
'orientation': serializer.toJson<int>(orientation),
|
||||
'adjustmentTime': serializer.toJson<DateTime?>(adjustmentTime),
|
||||
'latitude': serializer.toJson<double?>(latitude),
|
||||
'longitude': serializer.toJson<double?>(longitude),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -748,6 +897,9 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
i0.Value<String?> checksum = const i0.Value.absent(),
|
||||
bool? isFavorite,
|
||||
int? orientation,
|
||||
i0.Value<DateTime?> adjustmentTime = const i0.Value.absent(),
|
||||
i0.Value<double?> latitude = const i0.Value.absent(),
|
||||
i0.Value<double?> longitude = const i0.Value.absent(),
|
||||
}) => i1.LocalAssetEntityData(
|
||||
name: name ?? this.name,
|
||||
type: type ?? this.type,
|
||||
@@ -762,6 +914,11 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
checksum: checksum.present ? checksum.value : this.checksum,
|
||||
isFavorite: isFavorite ?? this.isFavorite,
|
||||
orientation: orientation ?? this.orientation,
|
||||
adjustmentTime: adjustmentTime.present
|
||||
? adjustmentTime.value
|
||||
: this.adjustmentTime,
|
||||
latitude: latitude.present ? latitude.value : this.latitude,
|
||||
longitude: longitude.present ? longitude.value : this.longitude,
|
||||
);
|
||||
LocalAssetEntityData copyWithCompanion(i1.LocalAssetEntityCompanion data) {
|
||||
return LocalAssetEntityData(
|
||||
@@ -782,6 +939,11 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
orientation: data.orientation.present
|
||||
? data.orientation.value
|
||||
: this.orientation,
|
||||
adjustmentTime: data.adjustmentTime.present
|
||||
? data.adjustmentTime.value
|
||||
: this.adjustmentTime,
|
||||
latitude: data.latitude.present ? data.latitude.value : this.latitude,
|
||||
longitude: data.longitude.present ? data.longitude.value : this.longitude,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -798,7 +960,10 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
..write('id: $id, ')
|
||||
..write('checksum: $checksum, ')
|
||||
..write('isFavorite: $isFavorite, ')
|
||||
..write('orientation: $orientation')
|
||||
..write('orientation: $orientation, ')
|
||||
..write('adjustmentTime: $adjustmentTime, ')
|
||||
..write('latitude: $latitude, ')
|
||||
..write('longitude: $longitude')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
@@ -816,6 +981,9 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
checksum,
|
||||
isFavorite,
|
||||
orientation,
|
||||
adjustmentTime,
|
||||
latitude,
|
||||
longitude,
|
||||
);
|
||||
@override
|
||||
bool operator ==(Object other) =>
|
||||
@@ -831,7 +999,10 @@ class LocalAssetEntityData extends i0.DataClass
|
||||
other.id == this.id &&
|
||||
other.checksum == this.checksum &&
|
||||
other.isFavorite == this.isFavorite &&
|
||||
other.orientation == this.orientation);
|
||||
other.orientation == this.orientation &&
|
||||
other.adjustmentTime == this.adjustmentTime &&
|
||||
other.latitude == this.latitude &&
|
||||
other.longitude == this.longitude);
|
||||
}
|
||||
|
||||
class LocalAssetEntityCompanion
|
||||
@@ -847,6 +1018,9 @@ class LocalAssetEntityCompanion
|
||||
final i0.Value<String?> checksum;
|
||||
final i0.Value<bool> isFavorite;
|
||||
final i0.Value<int> orientation;
|
||||
final i0.Value<DateTime?> adjustmentTime;
|
||||
final i0.Value<double?> latitude;
|
||||
final i0.Value<double?> longitude;
|
||||
const LocalAssetEntityCompanion({
|
||||
this.name = const i0.Value.absent(),
|
||||
this.type = const i0.Value.absent(),
|
||||
@@ -859,6 +1033,9 @@ class LocalAssetEntityCompanion
|
||||
this.checksum = const i0.Value.absent(),
|
||||
this.isFavorite = const i0.Value.absent(),
|
||||
this.orientation = const i0.Value.absent(),
|
||||
this.adjustmentTime = const i0.Value.absent(),
|
||||
this.latitude = const i0.Value.absent(),
|
||||
this.longitude = const i0.Value.absent(),
|
||||
});
|
||||
LocalAssetEntityCompanion.insert({
|
||||
required String name,
|
||||
@@ -872,6 +1049,9 @@ class LocalAssetEntityCompanion
|
||||
this.checksum = const i0.Value.absent(),
|
||||
this.isFavorite = const i0.Value.absent(),
|
||||
this.orientation = const i0.Value.absent(),
|
||||
this.adjustmentTime = const i0.Value.absent(),
|
||||
this.latitude = const i0.Value.absent(),
|
||||
this.longitude = const i0.Value.absent(),
|
||||
}) : name = i0.Value(name),
|
||||
type = i0.Value(type),
|
||||
id = i0.Value(id);
|
||||
@@ -887,6 +1067,9 @@ class LocalAssetEntityCompanion
|
||||
i0.Expression<String>? checksum,
|
||||
i0.Expression<bool>? isFavorite,
|
||||
i0.Expression<int>? orientation,
|
||||
i0.Expression<DateTime>? adjustmentTime,
|
||||
i0.Expression<double>? latitude,
|
||||
i0.Expression<double>? longitude,
|
||||
}) {
|
||||
return i0.RawValuesInsertable({
|
||||
if (name != null) 'name': name,
|
||||
@@ -900,6 +1083,9 @@ class LocalAssetEntityCompanion
|
||||
if (checksum != null) 'checksum': checksum,
|
||||
if (isFavorite != null) 'is_favorite': isFavorite,
|
||||
if (orientation != null) 'orientation': orientation,
|
||||
if (adjustmentTime != null) 'adjustment_time': adjustmentTime,
|
||||
if (latitude != null) 'latitude': latitude,
|
||||
if (longitude != null) 'longitude': longitude,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -915,6 +1101,9 @@ class LocalAssetEntityCompanion
|
||||
i0.Value<String?>? checksum,
|
||||
i0.Value<bool>? isFavorite,
|
||||
i0.Value<int>? orientation,
|
||||
i0.Value<DateTime?>? adjustmentTime,
|
||||
i0.Value<double?>? latitude,
|
||||
i0.Value<double?>? longitude,
|
||||
}) {
|
||||
return i1.LocalAssetEntityCompanion(
|
||||
name: name ?? this.name,
|
||||
@@ -928,6 +1117,9 @@ class LocalAssetEntityCompanion
|
||||
checksum: checksum ?? this.checksum,
|
||||
isFavorite: isFavorite ?? this.isFavorite,
|
||||
orientation: orientation ?? this.orientation,
|
||||
adjustmentTime: adjustmentTime ?? this.adjustmentTime,
|
||||
latitude: latitude ?? this.latitude,
|
||||
longitude: longitude ?? this.longitude,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -969,6 +1161,15 @@ class LocalAssetEntityCompanion
|
||||
if (orientation.present) {
|
||||
map['orientation'] = i0.Variable<int>(orientation.value);
|
||||
}
|
||||
if (adjustmentTime.present) {
|
||||
map['adjustment_time'] = i0.Variable<DateTime>(adjustmentTime.value);
|
||||
}
|
||||
if (latitude.present) {
|
||||
map['latitude'] = i0.Variable<double>(latitude.value);
|
||||
}
|
||||
if (longitude.present) {
|
||||
map['longitude'] = i0.Variable<double>(longitude.value);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@@ -985,7 +1186,10 @@ class LocalAssetEntityCompanion
|
||||
..write('id: $id, ')
|
||||
..write('checksum: $checksum, ')
|
||||
..write('isFavorite: $isFavorite, ')
|
||||
..write('orientation: $orientation')
|
||||
..write('orientation: $orientation, ')
|
||||
..write('adjustmentTime: $adjustmentTime, ')
|
||||
..write('latitude: $latitude, ')
|
||||
..write('longitude: $longitude')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@@ -10,7 +10,6 @@ import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory.entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.dart';
@@ -21,6 +20,7 @@ import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.d
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/stack.entity.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.steps.dart';
@@ -95,7 +95,7 @@ class Drift extends $Drift implements IDatabaseRepository {
}

@override
int get schemaVersion => 13;
int get schemaVersion => 14;

@override
MigrationStrategy get migration => MigrationStrategy(
@@ -185,6 +185,11 @@ class Drift extends $Drift implements IDatabaseRepository {
await m.createIndex(v13.idxTrashedLocalAssetChecksum);
await m.createIndex(v13.idxTrashedLocalAssetAlbum);
},
from13To14: (m, v14) async {
await m.addColumn(v14.localAssetEntity, v14.localAssetEntity.adjustmentTime);
await m.addColumn(v14.localAssetEntity, v14.localAssetEntity.latitude);
await m.addColumn(v14.localAssetEntity, v14.localAssetEntity.longitude);
},
),
);
@@ -5485,6 +5485,462 @@ i1.GeneratedColumn<String> _column_95(String aliasedName) =>
|
||||
false,
|
||||
type: i1.DriftSqlType.string,
|
||||
);
|
||||
|
||||
final class Schema14 extends i0.VersionedSchema {
|
||||
Schema14({required super.database}) : super(version: 14);
|
||||
@override
|
||||
late final List<i1.DatabaseSchemaEntity> entities = [
|
||||
userEntity,
|
||||
remoteAssetEntity,
|
||||
stackEntity,
|
||||
localAssetEntity,
|
||||
remoteAlbumEntity,
|
||||
localAlbumEntity,
|
||||
localAlbumAssetEntity,
|
||||
idxLocalAssetChecksum,
|
||||
idxRemoteAssetOwnerChecksum,
|
||||
uQRemoteAssetsOwnerChecksum,
|
||||
uQRemoteAssetsOwnerLibraryChecksum,
|
||||
idxRemoteAssetChecksum,
|
||||
authUserEntity,
|
||||
userMetadataEntity,
|
||||
partnerEntity,
|
||||
remoteExifEntity,
|
||||
remoteAlbumAssetEntity,
|
||||
remoteAlbumUserEntity,
|
||||
memoryEntity,
|
||||
memoryAssetEntity,
|
||||
personEntity,
|
||||
assetFaceEntity,
|
||||
storeEntity,
|
||||
trashedLocalAssetEntity,
|
||||
idxLatLng,
|
||||
idxTrashedLocalAssetChecksum,
|
||||
idxTrashedLocalAssetAlbum,
|
||||
];
|
||||
late final Shape20 userEntity = Shape20(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'user_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_3,
|
||||
_column_84,
|
||||
_column_85,
|
||||
_column_91,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape17 remoteAssetEntity = Shape17(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_1,
|
||||
_column_8,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_10,
|
||||
_column_11,
|
||||
_column_12,
|
||||
_column_0,
|
||||
_column_13,
|
||||
_column_14,
|
||||
_column_15,
|
||||
_column_16,
|
||||
_column_17,
|
||||
_column_18,
|
||||
_column_19,
|
||||
_column_20,
|
||||
_column_21,
|
||||
_column_86,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape3 stackEntity = Shape3(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'stack_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [_column_0, _column_9, _column_5, _column_15, _column_75],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape24 localAssetEntity = Shape24(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'local_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_1,
|
||||
_column_8,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_10,
|
||||
_column_11,
|
||||
_column_12,
|
||||
_column_0,
|
||||
_column_22,
|
||||
_column_14,
|
||||
_column_23,
|
||||
_column_96,
|
||||
_column_46,
|
||||
_column_47,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape9 remoteAlbumEntity = Shape9(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_album_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_56,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_15,
|
||||
_column_57,
|
||||
_column_58,
|
||||
_column_59,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape19 localAlbumEntity = Shape19(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'local_album_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_5,
|
||||
_column_31,
|
||||
_column_32,
|
||||
_column_90,
|
||||
_column_33,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape22 localAlbumAssetEntity = Shape22(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'local_album_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
|
||||
columns: [_column_34, _column_35, _column_33],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
final i1.Index idxLocalAssetChecksum = i1.Index(
|
||||
'idx_local_asset_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_local_asset_checksum ON local_asset_entity (checksum)',
|
||||
);
|
||||
final i1.Index idxRemoteAssetOwnerChecksum = i1.Index(
|
||||
'idx_remote_asset_owner_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
|
||||
);
|
||||
final i1.Index uQRemoteAssetsOwnerChecksum = i1.Index(
|
||||
'UQ_remote_assets_owner_checksum',
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_checksum ON remote_asset_entity (owner_id, checksum) WHERE(library_id IS NULL)',
|
||||
);
|
||||
final i1.Index uQRemoteAssetsOwnerLibraryChecksum = i1.Index(
|
||||
'UQ_remote_assets_owner_library_checksum',
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_library_checksum ON remote_asset_entity (owner_id, library_id, checksum) WHERE(library_id IS NOT NULL)',
|
||||
);
|
||||
final i1.Index idxRemoteAssetChecksum = i1.Index(
|
||||
'idx_remote_asset_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_remote_asset_checksum ON remote_asset_entity (checksum)',
|
||||
);
|
||||
late final Shape21 authUserEntity = Shape21(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'auth_user_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_3,
|
||||
_column_2,
|
||||
_column_84,
|
||||
_column_85,
|
||||
_column_92,
|
||||
_column_93,
|
||||
_column_7,
|
||||
_column_94,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape4 userMetadataEntity = Shape4(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'user_metadata_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(user_id, "key")'],
|
||||
columns: [_column_25, _column_26, _column_27],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape5 partnerEntity = Shape5(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'partner_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(shared_by_id, shared_with_id)'],
|
||||
columns: [_column_28, _column_29, _column_30],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape8 remoteExifEntity = Shape8(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_exif_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id)'],
|
||||
columns: [
|
||||
_column_36,
|
||||
_column_37,
|
||||
_column_38,
|
||||
_column_39,
|
||||
_column_40,
|
||||
_column_41,
|
||||
_column_11,
|
||||
_column_10,
|
||||
_column_42,
|
||||
_column_43,
|
||||
_column_44,
|
||||
_column_45,
|
||||
_column_46,
|
||||
_column_47,
|
||||
_column_48,
|
||||
_column_49,
|
||||
_column_50,
|
||||
_column_51,
|
||||
_column_52,
|
||||
_column_53,
|
||||
_column_54,
|
||||
_column_55,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape7 remoteAlbumAssetEntity = Shape7(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_album_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
|
||||
columns: [_column_36, _column_60],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape10 remoteAlbumUserEntity = Shape10(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_album_user_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(album_id, user_id)'],
|
||||
columns: [_column_60, _column_25, _column_61],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape11 memoryEntity = Shape11(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'memory_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_18,
|
||||
_column_15,
|
||||
_column_8,
|
||||
_column_62,
|
||||
_column_63,
|
||||
_column_64,
|
||||
_column_65,
|
||||
_column_66,
|
||||
_column_67,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape12 memoryAssetEntity = Shape12(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'memory_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id, memory_id)'],
|
||||
columns: [_column_36, _column_68],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape14 personEntity = Shape14(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'person_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_15,
|
||||
_column_1,
|
||||
_column_69,
|
||||
_column_71,
|
||||
_column_72,
|
||||
_column_73,
|
||||
_column_74,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape15 assetFaceEntity = Shape15(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'asset_face_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_36,
|
||||
_column_76,
|
||||
_column_77,
|
||||
_column_78,
|
||||
_column_79,
|
||||
_column_80,
|
||||
_column_81,
|
||||
_column_82,
|
||||
_column_83,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape18 storeEntity = Shape18(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'store_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [_column_87, _column_88, _column_89],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape23 trashedLocalAssetEntity = Shape23(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'trashed_local_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id, album_id)'],
|
||||
columns: [
|
||||
_column_1,
|
||||
_column_8,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_10,
|
||||
_column_11,
|
||||
_column_12,
|
||||
_column_0,
|
||||
_column_95,
|
||||
_column_22,
|
||||
_column_14,
|
||||
_column_23,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
final i1.Index idxLatLng = i1.Index(
|
||||
'idx_lat_lng',
|
||||
'CREATE INDEX IF NOT EXISTS idx_lat_lng ON remote_exif_entity (latitude, longitude)',
|
||||
);
|
||||
final i1.Index idxTrashedLocalAssetChecksum = i1.Index(
|
||||
'idx_trashed_local_asset_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_checksum ON trashed_local_asset_entity (checksum)',
|
||||
);
|
||||
final i1.Index idxTrashedLocalAssetAlbum = i1.Index(
|
||||
'idx_trashed_local_asset_album',
|
||||
'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_album ON trashed_local_asset_entity (album_id)',
|
||||
);
|
||||
}
|
||||
|
||||
class Shape24 extends i0.VersionedTable {
|
||||
Shape24({required super.source, required super.alias}) : super.aliased();
|
||||
i1.GeneratedColumn<String> get name =>
|
||||
columnsByName['name']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<int> get type =>
|
||||
columnsByName['type']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<DateTime> get createdAt =>
|
||||
columnsByName['created_at']! as i1.GeneratedColumn<DateTime>;
|
||||
i1.GeneratedColumn<DateTime> get updatedAt =>
|
||||
columnsByName['updated_at']! as i1.GeneratedColumn<DateTime>;
|
||||
i1.GeneratedColumn<int> get width =>
|
||||
columnsByName['width']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get height =>
|
||||
columnsByName['height']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get durationInSeconds =>
|
||||
columnsByName['duration_in_seconds']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<String> get id =>
|
||||
columnsByName['id']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<String> get checksum =>
|
||||
columnsByName['checksum']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<bool> get isFavorite =>
|
||||
columnsByName['is_favorite']! as i1.GeneratedColumn<bool>;
|
||||
i1.GeneratedColumn<int> get orientation =>
|
||||
columnsByName['orientation']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<DateTime> get adjustmentTime =>
|
||||
columnsByName['adjustment_time']! as i1.GeneratedColumn<DateTime>;
|
||||
i1.GeneratedColumn<double> get latitude =>
|
||||
columnsByName['latitude']! as i1.GeneratedColumn<double>;
|
||||
i1.GeneratedColumn<double> get longitude =>
|
||||
columnsByName['longitude']! as i1.GeneratedColumn<double>;
|
||||
}
|
||||
|
||||
i1.GeneratedColumn<DateTime> _column_96(String aliasedName) =>
|
||||
i1.GeneratedColumn<DateTime>(
|
||||
'adjustment_time',
|
||||
aliasedName,
|
||||
true,
|
||||
type: i1.DriftSqlType.dateTime,
|
||||
);
|
||||
i0.MigrationStepWithVersion migrationSteps({
|
||||
required Future<void> Function(i1.Migrator m, Schema2 schema) from1To2,
|
||||
required Future<void> Function(i1.Migrator m, Schema3 schema) from2To3,
|
||||
@@ -5498,6 +5954,7 @@ i0.MigrationStepWithVersion migrationSteps({
|
||||
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
|
||||
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
|
||||
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
|
||||
required Future<void> Function(i1.Migrator m, Schema14 schema) from13To14,
|
||||
}) {
|
||||
return (currentVersion, database) async {
|
||||
switch (currentVersion) {
|
||||
@@ -5561,6 +6018,11 @@ i0.MigrationStepWithVersion migrationSteps({
|
||||
final migrator = i1.Migrator(database, schema);
|
||||
await from12To13(migrator, schema);
|
||||
return 13;
|
||||
case 13:
|
||||
final schema = Schema14(database: database);
|
||||
final migrator = i1.Migrator(database, schema);
|
||||
await from13To14(migrator, schema);
|
||||
return 14;
|
||||
default:
|
||||
throw ArgumentError.value('Unknown migration from $currentVersion');
|
||||
}
|
||||
@@ -5580,6 +6042,7 @@ i1.OnUpgrade stepByStep({
|
||||
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
|
||||
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
|
||||
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
|
||||
required Future<void> Function(i1.Migrator m, Schema14 schema) from13To14,
|
||||
}) => i0.VersionedSchema.stepByStepHelper(
|
||||
step: migrationSteps(
|
||||
from1To2: from1To2,
|
||||
@@ -5594,5 +6057,6 @@ i1.OnUpgrade stepByStep({
|
||||
from10To11: from10To11,
|
||||
from11To12: from11To12,
|
||||
from12To13: from12To13,
|
||||
from13To14: from13To14,
|
||||
),
|
||||
);
|
||||
|
||||
@@ -1,3 +1,5 @@
import 'dart:async';

import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
@@ -244,7 +246,56 @@ class DriftLocalAlbumRepository extends DriftDatabaseRepository {
return query.map((row) => row.readTable(_db.localAssetEntity).toDto()).get();
}

Future<void> _upsertAssets(Iterable<LocalAsset> localAssets) {
Future<void> Function(Iterable<LocalAsset>) get _upsertAssets =>
CurrentPlatform.isIOS ? _upsertAssetsDarwin : _upsertAssetsAndroid;

Future<void> _upsertAssetsDarwin(Iterable<LocalAsset> localAssets) async {
if (localAssets.isEmpty) {
return Future.value();
}

// Reset checksum if asset changed
await _db.batch((batch) async {
for (final asset in localAssets) {
final companion = LocalAssetEntityCompanion(
checksum: const Value(null),
adjustmentTime: Value(asset.adjustmentTime),
);
batch.update(
_db.localAssetEntity,
companion,
where: (row) => row.id.equals(asset.id) & row.adjustmentTime.isNotExp(Variable(asset.adjustmentTime)),
);
}
});

return _db.batch((batch) async {
for (final asset in localAssets) {
final companion = LocalAssetEntityCompanion.insert(
name: asset.name,
type: asset.type,
createdAt: Value(asset.createdAt),
updatedAt: Value(asset.updatedAt),
width: Value(asset.width),
height: Value(asset.height),
durationInSeconds: Value(asset.durationInSeconds),
id: asset.id,
orientation: Value(asset.orientation),
isFavorite: Value(asset.isFavorite),
latitude: Value(asset.latitude),
longitude: Value(asset.longitude),
adjustmentTime: Value(asset.adjustmentTime),
);
batch.insert<$LocalAssetEntityTable, LocalAssetEntityData>(
_db.localAssetEntity,
companion.copyWith(checksum: const Value(null)),
onConflict: DoUpdate((old) => companion),
);
}
});
}

Future<void> _upsertAssetsAndroid(Iterable<LocalAsset> localAssets) async {
if (localAssets.isEmpty) {
return Future.value();
}
@@ -260,6 +311,7 @@ class DriftLocalAlbumRepository extends DriftDatabaseRepository {
height: Value(asset.height),
durationInSeconds: Value(asset.durationInSeconds),
id: asset.id,
checksum: const Value(null),
orientation: Value(asset.orientation),
isFavorite: Value(asset.isFavorite),
);
@@ -22,7 +22,7 @@ abstract final class ExifDtoConverter {
f: dto.fNumber?.toDouble(),
mm: dto.focalLength?.toDouble(),
iso: dto.iso?.toInt(),
exposureSeconds: _exposureTimeToSeconds(dto.exposureTime),
exposureSeconds: exposureTimeToSeconds(dto.exposureTime),
);
}

@@ -36,15 +36,15 @@ abstract final class ExifDtoConverter {
return isRotated90CW || isRotated270CW;
}

static double? _exposureTimeToSeconds(String? s) {
if (s == null) {
static double? exposureTimeToSeconds(String? second) {
if (second == null) {
return null;
}
double? value = double.tryParse(s);
double? value = double.tryParse(second);
if (value != null) {
return value;
}
final parts = s.split("/");
final parts = second.split("/");
if (parts.length == 2) {
final numerator = double.tryParse(parts.firstOrNull ?? "-");
final denominator = double.tryParse(parts.lastOrNull ?? "-");
@@ -49,7 +49,7 @@ class MapLocationPickerPage extends HookConsumerWidget {

var currentLatLng = LatLng(currentLocation.latitude, currentLocation.longitude);
selectedLatLng.value = currentLatLng;
await controller.value?.animateCamera(CameraUpdate.newLatLng(currentLatLng));
await controller.value?.animateCamera(CameraUpdate.newLatLngZoom(currentLatLng, 12));
}

return MapThemeOverride(
@@ -66,7 +66,10 @@ class MapLocationPickerPage extends HookConsumerWidget {
borderRadius: BorderRadius.only(bottomLeft: Radius.circular(40), bottomRight: Radius.circular(40)),
),
child: MapLibreMap(
initialCameraPosition: CameraPosition(target: initialLatLng, zoom: 12),
initialCameraPosition: CameraPosition(
target: initialLatLng,
zoom: (initialLatLng.latitude == 0 && initialLatLng.longitude == 0) ? 1 : 12,
),
styleString: style,
onMapCreated: (mapController) => controller.value = mapController,
onStyleLoadedCallback: onStyleLoaded,
28 mobile/lib/platform/native_sync_api.g.dart generated
@@ -41,6 +41,9 @@ class PlatformAsset {
|
||||
required this.durationInSeconds,
|
||||
required this.orientation,
|
||||
required this.isFavorite,
|
||||
this.adjustmentTime,
|
||||
this.latitude,
|
||||
this.longitude,
|
||||
});
|
||||
|
||||
String id;
|
||||
@@ -63,8 +66,28 @@ class PlatformAsset {
|
||||
|
||||
bool isFavorite;
|
||||
|
||||
int? adjustmentTime;
|
||||
|
||||
double? latitude;
|
||||
|
||||
double? longitude;
|
||||
|
||||
List<Object?> _toList() {
|
||||
return <Object?>[id, name, type, createdAt, updatedAt, width, height, durationInSeconds, orientation, isFavorite];
|
||||
return <Object?>[
|
||||
id,
|
||||
name,
|
||||
type,
|
||||
createdAt,
|
||||
updatedAt,
|
||||
width,
|
||||
height,
|
||||
durationInSeconds,
|
||||
orientation,
|
||||
isFavorite,
|
||||
adjustmentTime,
|
||||
latitude,
|
||||
longitude,
|
||||
];
|
||||
}
|
||||
|
||||
Object encode() {
|
||||
@@ -84,6 +107,9 @@ class PlatformAsset {
|
||||
durationInSeconds: result[7]! as int,
|
||||
orientation: result[8]! as int,
|
||||
isFavorite: result[9]! as bool,
|
||||
adjustmentTime: result[10] as int?,
|
||||
latitude: result[11] as double?,
|
||||
longitude: result[12] as double?,
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/exif.model.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';

@RoutePage()
@@ -129,6 +130,15 @@ class _AssetPropertiesSectionState extends ConsumerState<_AssetPropertiesSection
properties.insert(4, _PropertyItem(label: 'Orientation', value: asset.orientation.toString()));
final albums = await ref.read(assetServiceProvider).getSourceAlbums(asset.id);
properties.add(_PropertyItem(label: 'Album', value: albums.map((a) => a.name).join(', ')));
if (CurrentPlatform.isIOS) {
properties.add(_PropertyItem(label: 'Adjustment Time', value: asset.adjustmentTime?.toString()));
}
properties.add(
_PropertyItem(
label: 'GPS Coordinates',
value: asset.hasCoordinates ? '${asset.latitude}, ${asset.longitude}' : null,
),
);
}

Future<void> _addRemoteAssetProperties(RemoteAsset asset) async {
@@ -251,8 +251,8 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
|
||||
color: context.textTheme.labelLarge?.color,
|
||||
),
|
||||
subtitle: _getFileInfo(asset, exifInfo),
|
||||
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
|
||||
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
|
||||
subtitleStyle: context.textTheme.labelMedium?.copyWith(
|
||||
color: context.textTheme.labelMedium?.color?.withAlpha(200),
|
||||
),
|
||||
);
|
||||
},
|
||||
@@ -268,8 +268,8 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
|
||||
color: context.textTheme.labelLarge?.color,
|
||||
),
|
||||
subtitle: _getFileInfo(asset, exifInfo),
|
||||
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
|
||||
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
|
||||
subtitleStyle: context.textTheme.labelMedium?.copyWith(
|
||||
color: context.textTheme.labelMedium?.color?.withAlpha(200),
|
||||
),
|
||||
);
|
||||
}
|
||||
@@ -280,7 +280,7 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
|
||||
// Asset Date and Time
|
||||
SheetTile(
|
||||
title: _getDateTime(context, asset, exifInfo),
|
||||
titleStyle: context.textTheme.bodyMedium?.copyWith(fontWeight: FontWeight.w600),
|
||||
titleStyle: context.textTheme.labelLarge,
|
||||
trailing: asset.hasRemote && isOwner ? const Icon(Icons.edit, size: 18) : null,
|
||||
onTap: asset.hasRemote && isOwner ? () async => await _editDateTime(context, ref) : null,
|
||||
),
|
||||
@@ -289,7 +289,7 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
|
||||
const SheetLocationDetails(),
|
||||
// Details header
|
||||
SheetTile(
|
||||
title: 'exif_bottom_sheet_details'.t(context: context),
|
||||
title: 'details'.t(context: context).toUpperCase(),
|
||||
titleStyle: context.textTheme.labelMedium?.copyWith(
|
||||
color: context.textTheme.labelMedium?.color?.withAlpha(200),
|
||||
fontWeight: FontWeight.w600,
|
||||
@@ -298,29 +298,33 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
|
||||
// File info
|
||||
buildFileInfoTile(),
|
||||
// Camera info
|
||||
if (cameraTitle != null)
|
||||
if (cameraTitle != null) ...[
|
||||
const SizedBox(height: 16),
|
||||
SheetTile(
|
||||
title: cameraTitle,
|
||||
titleStyle: context.textTheme.labelLarge,
|
||||
leading: Icon(Icons.camera_alt_outlined, size: 24, color: context.textTheme.labelLarge?.color),
|
||||
subtitle: _getCameraInfoSubtitle(exifInfo),
|
||||
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
|
||||
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
|
||||
subtitleStyle: context.textTheme.labelMedium?.copyWith(
|
||||
color: context.textTheme.labelMedium?.color?.withAlpha(200),
|
||||
),
|
||||
),
|
||||
],
|
||||
// Lens info
|
||||
if (lensTitle != null)
|
||||
if (lensTitle != null) ...[
|
||||
const SizedBox(height: 16),
|
||||
SheetTile(
|
||||
title: lensTitle,
|
||||
titleStyle: context.textTheme.labelLarge,
|
||||
leading: Icon(Icons.camera_outlined, size: 24, color: context.textTheme.labelLarge?.color),
|
||||
subtitle: _getLensInfoSubtitle(exifInfo),
|
||||
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
|
||||
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
|
||||
subtitleStyle: context.textTheme.labelMedium?.copyWith(
|
||||
color: context.textTheme.labelMedium?.color?.withAlpha(200),
|
||||
),
|
||||
),
|
||||
],
|
||||
// Appears in (Albums)
|
||||
_buildAppearsInList(ref, context),
|
||||
Padding(padding: const EdgeInsets.only(top: 16.0), child: _buildAppearsInList(ref, context)),
|
||||
// padding at the bottom to avoid cut-off
|
||||
const SizedBox(height: 100),
|
||||
],
|
||||
|
||||
@@ -78,7 +78,7 @@ class _SheetLocationDetailsState extends ConsumerState<SheetLocationDetails> {
|
||||
crossAxisAlignment: CrossAxisAlignment.start,
|
||||
children: [
|
||||
SheetTile(
|
||||
title: 'exif_bottom_sheet_location'.t(context: context),
|
||||
title: 'location'.t(context: context).toUpperCase(),
|
||||
titleStyle: context.textTheme.labelMedium?.copyWith(
|
||||
color: context.textTheme.labelMedium?.color?.withAlpha(200),
|
||||
fontWeight: FontWeight.w600,
|
||||
@@ -102,7 +102,7 @@ class _SheetLocationDetailsState extends ConsumerState<SheetLocationDetails> {
|
||||
Text(
|
||||
coordinates,
|
||||
style: context.textTheme.labelMedium?.copyWith(
|
||||
color: context.textTheme.labelMedium?.color?.withAlpha(150),
|
||||
color: context.textTheme.labelMedium?.color?.withAlpha(200),
|
||||
),
|
||||
),
|
||||
],
|
||||
|
||||
@@ -46,7 +46,7 @@ class SheetTile extends ConsumerWidget {
|
||||
} else {
|
||||
titleWidget = Container(
|
||||
width: double.infinity,
|
||||
padding: const EdgeInsets.only(left: 15),
|
||||
padding: const EdgeInsets.only(left: 15, right: 15),
|
||||
child: Text(title, style: titleStyle),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/services/log.service.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/models/backup/backup_state.model.dart';
import 'package:immich_mobile/providers/album/album.provider.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
@@ -150,7 +151,7 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
try {
bool syncSuccess = false;
await Future.wait([
_safeRun(backgroundManager.syncLocal(), "syncLocal"),
_safeRun(backgroundManager.syncLocal(full: CurrentPlatform.isAndroid ? true : false), "syncLocal"),
_safeRun(backgroundManager.syncRemote().then((success) => syncSuccess = success), "syncRemote"),
]);
if (syncSuccess) {

@@ -81,7 +81,7 @@ Future<void> migrateDatabaseIfNeeded(Isar db, Drift drift) async {
}

if (version < 19 && Store.isBetaTimelineEnabled) {
if (!await _populateUpdatedAtTime(drift)) {
if (!await _populateLocalAssetTime(drift)) {
return;
}
}
@@ -229,7 +229,7 @@ Future<void> _migrateDeviceAsset(Isar db) async {
});
}

Future<bool> _populateUpdatedAtTime(Drift db) async {
Future<bool> _populateLocalAssetTime(Drift db) async {
try {
final nativeApi = NativeSyncApi();
final albums = await nativeApi.getAlbums();
@@ -240,6 +240,9 @@ Future<bool> _populateUpdatedAtTime(Drift db) async {
batch.update(
db.localAssetEntity,
LocalAssetEntityCompanion(
longitude: Value(asset.longitude),
latitude: Value(asset.latitude),
adjustmentTime: Value(tryFromSecondsSinceEpoch(asset.adjustmentTime, isUtc: true)),
updatedAt: Value(tryFromSecondsSinceEpoch(asset.updatedAt, isUtc: true) ?? DateTime.timestamp()),
),
where: (t) => t.id.equals(asset.id),
@@ -250,7 +253,7 @@ Future<bool> _populateUpdatedAtTime(Drift db) async {

return true;
} catch (error) {
dPrint(() => "[MIGRATION] Error while populating updatedAt time: $error");
dPrint(() => "[MIGRATION] Error while populating asset time: $error");
return false;
}
}
17 mobile/openapi/README.md generated
@@ -133,11 +133,6 @@ Class | Method | HTTP request | Description
|
||||
*AuthenticationApi* | [**unlockAuthSession**](doc//AuthenticationApi.md#unlockauthsession) | **POST** /auth/session/unlock | Unlock auth session
|
||||
*AuthenticationApi* | [**validateAccessToken**](doc//AuthenticationApi.md#validateaccesstoken) | **POST** /auth/validateToken | Validate access token
|
||||
*AuthenticationAdminApi* | [**unlinkAllOAuthAccountsAdmin**](doc//AuthenticationAdminApi.md#unlinkalloauthaccountsadmin) | **POST** /admin/auth/unlink-all | Unlink all OAuth accounts
|
||||
*DatabaseBackupsAdminApi* | [**deleteDatabaseBackup**](doc//DatabaseBackupsAdminApi.md#deletedatabasebackup) | **DELETE** /admin/database-backups | Delete database backup
|
||||
*DatabaseBackupsAdminApi* | [**downloadDatabaseBackup**](doc//DatabaseBackupsAdminApi.md#downloaddatabasebackup) | **GET** /admin/database-backups/{filename} | Download database backup
|
||||
*DatabaseBackupsAdminApi* | [**listDatabaseBackups**](doc//DatabaseBackupsAdminApi.md#listdatabasebackups) | **GET** /admin/database-backups | List database backups
|
||||
*DatabaseBackupsAdminApi* | [**startDatabaseRestoreFlow**](doc//DatabaseBackupsAdminApi.md#startdatabaserestoreflow) | **POST** /admin/database-backups/start-restore | Start database backup restore flow
|
||||
*DatabaseBackupsAdminApi* | [**uploadDatabaseBackup**](doc//DatabaseBackupsAdminApi.md#uploaddatabasebackup) | **POST** /admin/database-backups/upload | Upload database backup
|
||||
*DeprecatedApi* | [**createPartnerDeprecated**](doc//DeprecatedApi.md#createpartnerdeprecated) | **POST** /partners/{id} | Create a partner
|
||||
*DeprecatedApi* | [**getAllUserAssetsByDeviceId**](doc//DeprecatedApi.md#getalluserassetsbydeviceid) | **GET** /assets/device/{deviceId} | Retrieve assets by device ID
|
||||
*DeprecatedApi* | [**getDeltaSync**](doc//DeprecatedApi.md#getdeltasync) | **POST** /sync/delta-sync | Get delta sync for user
|
||||
@@ -166,8 +161,6 @@ Class | Method | HTTP request | Description
|
||||
*LibrariesApi* | [**scanLibrary**](doc//LibrariesApi.md#scanlibrary) | **POST** /libraries/{id}/scan | Scan a library
|
||||
*LibrariesApi* | [**updateLibrary**](doc//LibrariesApi.md#updatelibrary) | **PUT** /libraries/{id} | Update a library
|
||||
*LibrariesApi* | [**validate**](doc//LibrariesApi.md#validate) | **POST** /libraries/{id}/validate | Validate library settings
|
||||
*MaintenanceAdminApi* | [**detectPriorInstall**](doc//MaintenanceAdminApi.md#detectpriorinstall) | **GET** /admin/maintenance/detect-install | Detect existing install
|
||||
*MaintenanceAdminApi* | [**getMaintenanceStatus**](doc//MaintenanceAdminApi.md#getmaintenancestatus) | **GET** /admin/maintenance/status | Get maintenance mode status
|
||||
*MaintenanceAdminApi* | [**maintenanceLogin**](doc//MaintenanceAdminApi.md#maintenancelogin) | **POST** /admin/maintenance/login | Log into maintenance mode
|
||||
*MaintenanceAdminApi* | [**setMaintenanceMode**](doc//MaintenanceAdminApi.md#setmaintenancemode) | **POST** /admin/maintenance | Set maintenance mode
|
||||
*MapApi* | [**getMapMarkers**](doc//MapApi.md#getmapmarkers) | **GET** /map/markers | Retrieve map markers
|
||||
@@ -206,6 +199,7 @@ Class | Method | HTTP request | Description
|
||||
*PeopleApi* | [**updatePeople**](doc//PeopleApi.md#updatepeople) | **PUT** /people | Update people
|
||||
*PeopleApi* | [**updatePerson**](doc//PeopleApi.md#updateperson) | **PUT** /people/{id} | Update person
|
||||
*PluginsApi* | [**getPlugin**](doc//PluginsApi.md#getplugin) | **GET** /plugins/{id} | Retrieve a plugin
|
||||
*PluginsApi* | [**getPluginTriggers**](doc//PluginsApi.md#getplugintriggers) | **GET** /plugins/triggers | List all plugin triggers
|
||||
*PluginsApi* | [**getPlugins**](doc//PluginsApi.md#getplugins) | **GET** /plugins | List all plugins
|
||||
*QueuesApi* | [**emptyQueue**](doc//QueuesApi.md#emptyqueue) | **DELETE** /queues/{name}/jobs | Empty a queue
|
||||
*QueuesApi* | [**getQueue**](doc//QueuesApi.md#getqueue) | **GET** /queues/{name} | Retrieve a queue
|
||||
@@ -394,8 +388,6 @@ Class | Method | HTTP request | Description
|
||||
- [CreateLibraryDto](doc//CreateLibraryDto.md)
|
||||
- [CreateProfileImageResponseDto](doc//CreateProfileImageResponseDto.md)
|
||||
- [DatabaseBackupConfig](doc//DatabaseBackupConfig.md)
|
||||
- [DatabaseBackupDeleteDto](doc//DatabaseBackupDeleteDto.md)
|
||||
- [DatabaseBackupListResponseDto](doc//DatabaseBackupListResponseDto.md)
|
||||
- [DownloadArchiveInfo](doc//DownloadArchiveInfo.md)
|
||||
- [DownloadInfoDto](doc//DownloadInfoDto.md)
|
||||
- [DownloadResponse](doc//DownloadResponse.md)
|
||||
@@ -425,10 +417,7 @@ Class | Method | HTTP request | Description
|
||||
- [MachineLearningAvailabilityChecksDto](doc//MachineLearningAvailabilityChecksDto.md)
|
||||
- [MaintenanceAction](doc//MaintenanceAction.md)
|
||||
- [MaintenanceAuthDto](doc//MaintenanceAuthDto.md)
|
||||
- [MaintenanceDetectInstallResponseDto](doc//MaintenanceDetectInstallResponseDto.md)
|
||||
- [MaintenanceDetectInstallStorageFolderDto](doc//MaintenanceDetectInstallStorageFolderDto.md)
|
||||
- [MaintenanceLoginDto](doc//MaintenanceLoginDto.md)
|
||||
- [MaintenanceStatusResponseDto](doc//MaintenanceStatusResponseDto.md)
|
||||
- [ManualJobName](doc//ManualJobName.md)
|
||||
- [MapMarkerResponseDto](doc//MapMarkerResponseDto.md)
|
||||
- [MapReverseGeocodeResponseDto](doc//MapReverseGeocodeResponseDto.md)
|
||||
@@ -477,9 +466,10 @@ Class | Method | HTTP request | Description
|
||||
- [PinCodeSetupDto](doc//PinCodeSetupDto.md)
|
||||
- [PlacesResponseDto](doc//PlacesResponseDto.md)
|
||||
- [PluginActionResponseDto](doc//PluginActionResponseDto.md)
|
||||
- [PluginContext](doc//PluginContext.md)
|
||||
- [PluginContextType](doc//PluginContextType.md)
|
||||
- [PluginFilterResponseDto](doc//PluginFilterResponseDto.md)
|
||||
- [PluginResponseDto](doc//PluginResponseDto.md)
|
||||
- [PluginTriggerResponseDto](doc//PluginTriggerResponseDto.md)
|
||||
- [PluginTriggerType](doc//PluginTriggerType.md)
|
||||
- [PurchaseResponse](doc//PurchaseResponse.md)
|
||||
- [PurchaseUpdate](doc//PurchaseUpdate.md)
|
||||
@@ -540,7 +530,6 @@ Class | Method | HTTP request | Description
|
||||
- [StackResponseDto](doc//StackResponseDto.md)
|
||||
- [StackUpdateDto](doc//StackUpdateDto.md)
|
||||
- [StatisticsSearchDto](doc//StatisticsSearchDto.md)
|
||||
- [StorageFolder](doc//StorageFolder.md)
|
||||
- [SyncAckDeleteDto](doc//SyncAckDeleteDto.md)
|
||||
- [SyncAckDto](doc//SyncAckDto.md)
|
||||
- [SyncAckSetDto](doc//SyncAckSetDto.md)
|
||||
|
||||
10
mobile/openapi/lib/api.dart
generated
@@ -36,7 +36,6 @@ part 'api/albums_api.dart';
|
||||
part 'api/assets_api.dart';
|
||||
part 'api/authentication_api.dart';
|
||||
part 'api/authentication_admin_api.dart';
|
||||
part 'api/database_backups_admin_api.dart';
|
||||
part 'api/deprecated_api.dart';
|
||||
part 'api/download_api.dart';
|
||||
part 'api/duplicates_api.dart';
|
||||
@@ -140,8 +139,6 @@ part 'model/create_album_dto.dart';
|
||||
part 'model/create_library_dto.dart';
|
||||
part 'model/create_profile_image_response_dto.dart';
|
||||
part 'model/database_backup_config.dart';
|
||||
part 'model/database_backup_delete_dto.dart';
|
||||
part 'model/database_backup_list_response_dto.dart';
|
||||
part 'model/download_archive_info.dart';
|
||||
part 'model/download_info_dto.dart';
|
||||
part 'model/download_response.dart';
|
||||
@@ -171,10 +168,7 @@ part 'model/logout_response_dto.dart';
|
||||
part 'model/machine_learning_availability_checks_dto.dart';
|
||||
part 'model/maintenance_action.dart';
|
||||
part 'model/maintenance_auth_dto.dart';
|
||||
part 'model/maintenance_detect_install_response_dto.dart';
|
||||
part 'model/maintenance_detect_install_storage_folder_dto.dart';
|
||||
part 'model/maintenance_login_dto.dart';
|
||||
part 'model/maintenance_status_response_dto.dart';
|
||||
part 'model/manual_job_name.dart';
|
||||
part 'model/map_marker_response_dto.dart';
|
||||
part 'model/map_reverse_geocode_response_dto.dart';
|
||||
@@ -223,9 +217,10 @@ part 'model/pin_code_reset_dto.dart';
|
||||
part 'model/pin_code_setup_dto.dart';
|
||||
part 'model/places_response_dto.dart';
|
||||
part 'model/plugin_action_response_dto.dart';
|
||||
part 'model/plugin_context.dart';
|
||||
part 'model/plugin_context_type.dart';
|
||||
part 'model/plugin_filter_response_dto.dart';
|
||||
part 'model/plugin_response_dto.dart';
|
||||
part 'model/plugin_trigger_response_dto.dart';
|
||||
part 'model/plugin_trigger_type.dart';
|
||||
part 'model/purchase_response.dart';
|
||||
part 'model/purchase_update.dart';
|
||||
@@ -286,7 +281,6 @@ part 'model/stack_create_dto.dart';
|
||||
part 'model/stack_response_dto.dart';
|
||||
part 'model/stack_update_dto.dart';
|
||||
part 'model/statistics_search_dto.dart';
|
||||
part 'model/storage_folder.dart';
|
||||
part 'model/sync_ack_delete_dto.dart';
|
||||
part 'model/sync_ack_dto.dart';
|
||||
part 'model/sync_ack_set_dto.dart';
|
||||
|
||||
269
mobile/openapi/lib/api/database_backups_admin_api.dart
generated
@@ -1,269 +0,0 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
|
||||
class DatabaseBackupsAdminApi {
|
||||
DatabaseBackupsAdminApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
|
||||
|
||||
final ApiClient apiClient;
|
||||
|
||||
/// Delete database backup
|
||||
///
|
||||
/// Delete a backup by its filename
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [DatabaseBackupDeleteDto] databaseBackupDeleteDto (required):
|
||||
Future<Response> deleteDatabaseBackupWithHttpInfo(DatabaseBackupDeleteDto databaseBackupDeleteDto,) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody = databaseBackupDeleteDto;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>['application/json'];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'DELETE',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Delete database backup
|
||||
///
|
||||
/// Delete a backup by its filename
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [DatabaseBackupDeleteDto] databaseBackupDeleteDto (required):
|
||||
Future<void> deleteDatabaseBackup(DatabaseBackupDeleteDto databaseBackupDeleteDto,) async {
|
||||
final response = await deleteDatabaseBackupWithHttpInfo(databaseBackupDeleteDto,);
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
}
|
||||
|
||||
/// Download database backup
|
||||
///
|
||||
/// Downloads the database backup file
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [String] filename (required):
|
||||
Future<Response> downloadDatabaseBackupWithHttpInfo(String filename,) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups/{filename}'
|
||||
.replaceAll('{filename}', filename);
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Download database backup
|
||||
///
|
||||
/// Downloads the database backup file
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [String] filename (required):
|
||||
Future<MultipartFile?> downloadDatabaseBackup(String filename,) async {
|
||||
final response = await downloadDatabaseBackupWithHttpInfo(filename,);
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MultipartFile',) as MultipartFile;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// List database backups
|
||||
///
|
||||
/// Get the list of the successful and failed backups
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
Future<Response> listDatabaseBackupsWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// List database backups
|
||||
///
|
||||
/// Get the list of the successful and failed backups
|
||||
Future<DatabaseBackupListResponseDto?> listDatabaseBackups() async {
|
||||
final response = await listDatabaseBackupsWithHttpInfo();
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'DatabaseBackupListResponseDto',) as DatabaseBackupListResponseDto;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Start database backup restore flow
|
||||
///
|
||||
/// Put Immich into maintenance mode to restore a backup (Immich must not be configured)
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
Future<Response> startDatabaseRestoreFlowWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups/start-restore';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'POST',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Start database backup restore flow
|
||||
///
|
||||
/// Put Immich into maintenance mode to restore a backup (Immich must not be configured)
|
||||
Future<void> startDatabaseRestoreFlow() async {
|
||||
final response = await startDatabaseRestoreFlowWithHttpInfo();
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
}
|
||||
|
||||
/// Upload database backup
|
||||
///
|
||||
/// Uploads .sql/.sql.gz file to restore backup from
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [MultipartFile] file:
|
||||
Future<Response> uploadDatabaseBackupWithHttpInfo({ MultipartFile? file, }) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/database-backups/upload';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>['multipart/form-data'];
|
||||
|
||||
bool hasFields = false;
|
||||
final mp = MultipartRequest('POST', Uri.parse(apiPath));
|
||||
if (file != null) {
|
||||
hasFields = true;
|
||||
mp.fields[r'file'] = file.field;
|
||||
mp.files.add(file);
|
||||
}
|
||||
if (hasFields) {
|
||||
postBody = mp;
|
||||
}
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'POST',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Upload database backup
|
||||
///
|
||||
/// Uploads .sql/.sql.gz file to restore backup from
|
||||
///
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [MultipartFile] file:
|
||||
Future<void> uploadDatabaseBackup({ MultipartFile? file, }) async {
|
||||
final response = await uploadDatabaseBackupWithHttpInfo( file: file, );
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
}
|
||||
}
|
||||
96
mobile/openapi/lib/api/maintenance_admin_api.dart
generated
@@ -16,102 +16,6 @@ class MaintenanceAdminApi {
|
||||
|
||||
final ApiClient apiClient;
|
||||
|
||||
/// Detect existing install
|
||||
///
|
||||
/// Collect integrity checks and other heuristics about local data.
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
Future<Response> detectPriorInstallWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/maintenance/detect-install';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Detect existing install
|
||||
///
|
||||
/// Collect integrity checks and other heuristics about local data.
|
||||
Future<MaintenanceDetectInstallResponseDto?> detectPriorInstall() async {
|
||||
final response = await detectPriorInstallWithHttpInfo();
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MaintenanceDetectInstallResponseDto',) as MaintenanceDetectInstallResponseDto;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Get maintenance mode status
|
||||
///
|
||||
/// Fetch information about the currently running maintenance action.
|
||||
///
|
||||
/// Note: This method returns the HTTP [Response].
|
||||
Future<Response> getMaintenanceStatusWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/admin/maintenance/status';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
apiPath,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Get maintenance mode status
|
||||
///
|
||||
/// Fetch information about the currently running maintenance action.
|
||||
Future<MaintenanceStatusResponseDto?> getMaintenanceStatus() async {
|
||||
final response = await getMaintenanceStatusWithHttpInfo();
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MaintenanceStatusResponseDto',) as MaintenanceStatusResponseDto;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Log into maintenance mode
|
||||
///
|
||||
/// Login with maintenance token or cookie to receive current information and perform further actions.
|
||||
|
||||
51
mobile/openapi/lib/api/plugins_api.dart
generated
@@ -73,6 +73,57 @@ class PluginsApi {
return null;
}

/// List all plugin triggers
///
/// Retrieve a list of all available plugin triggers.
///
/// Note: This method returns the HTTP [Response].
Future<Response> getPluginTriggersWithHttpInfo() async {
// ignore: prefer_const_declarations
final apiPath = r'/plugins/triggers';

// ignore: prefer_final_locals
Object? postBody;

final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};

const contentTypes = <String>[];


return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}

/// List all plugin triggers
///
/// Retrieve a list of all available plugin triggers.
Future<List<PluginTriggerResponseDto>?> getPluginTriggers() async {
final response = await getPluginTriggersWithHttpInfo();
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<PluginTriggerResponseDto>') as List)
.cast<PluginTriggerResponseDto>()
.toList(growable: false);

}
return null;
}

/// List all plugins
///
/// Retrieve a list of plugins available to the authenticated user.
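The hunk above adds a GET /plugins/triggers endpoint to the generated Dart client. As a hedged usage sketch (the import path and a pre-configured defaultApiClient with base path and credentials are assumptions, not shown in this diff), a caller could list the trigger/context pairs like this:

import 'package:openapi/api.dart'; // assumed package name for the generated client

Future<void> printPluginTriggers() async {
  final pluginsApi = PluginsApi(); // uses defaultApiClient unless one is passed in
  final triggers = await pluginsApi.getPluginTriggers() ?? const <PluginTriggerResponseDto>[];
  for (final trigger in triggers) {
    // Each entry pairs a context type (asset/album/person) with a trigger type.
    print('${trigger.contextType.value}: ${trigger.type.value}');
  }
}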
18
mobile/openapi/lib/api_client.dart
generated
@@ -326,10 +326,6 @@ class ApiClient {
|
||||
return CreateProfileImageResponseDto.fromJson(value);
|
||||
case 'DatabaseBackupConfig':
|
||||
return DatabaseBackupConfig.fromJson(value);
|
||||
case 'DatabaseBackupDeleteDto':
|
||||
return DatabaseBackupDeleteDto.fromJson(value);
|
||||
case 'DatabaseBackupListResponseDto':
|
||||
return DatabaseBackupListResponseDto.fromJson(value);
|
||||
case 'DownloadArchiveInfo':
|
||||
return DownloadArchiveInfo.fromJson(value);
|
||||
case 'DownloadInfoDto':
|
||||
@@ -388,14 +384,8 @@ class ApiClient {
|
||||
return MaintenanceActionTypeTransformer().decode(value);
|
||||
case 'MaintenanceAuthDto':
|
||||
return MaintenanceAuthDto.fromJson(value);
|
||||
case 'MaintenanceDetectInstallResponseDto':
|
||||
return MaintenanceDetectInstallResponseDto.fromJson(value);
|
||||
case 'MaintenanceDetectInstallStorageFolderDto':
|
||||
return MaintenanceDetectInstallStorageFolderDto.fromJson(value);
|
||||
case 'MaintenanceLoginDto':
|
||||
return MaintenanceLoginDto.fromJson(value);
|
||||
case 'MaintenanceStatusResponseDto':
|
||||
return MaintenanceStatusResponseDto.fromJson(value);
|
||||
case 'ManualJobName':
|
||||
return ManualJobNameTypeTransformer().decode(value);
|
||||
case 'MapMarkerResponseDto':
|
||||
@@ -492,12 +482,14 @@ class ApiClient {
|
||||
return PlacesResponseDto.fromJson(value);
|
||||
case 'PluginActionResponseDto':
|
||||
return PluginActionResponseDto.fromJson(value);
|
||||
case 'PluginContext':
|
||||
return PluginContextTypeTransformer().decode(value);
|
||||
case 'PluginContextType':
|
||||
return PluginContextTypeTypeTransformer().decode(value);
|
||||
case 'PluginFilterResponseDto':
|
||||
return PluginFilterResponseDto.fromJson(value);
|
||||
case 'PluginResponseDto':
|
||||
return PluginResponseDto.fromJson(value);
|
||||
case 'PluginTriggerResponseDto':
|
||||
return PluginTriggerResponseDto.fromJson(value);
|
||||
case 'PluginTriggerType':
|
||||
return PluginTriggerTypeTypeTransformer().decode(value);
|
||||
case 'PurchaseResponse':
|
||||
@@ -618,8 +610,6 @@ class ApiClient {
|
||||
return StackUpdateDto.fromJson(value);
|
||||
case 'StatisticsSearchDto':
|
||||
return StatisticsSearchDto.fromJson(value);
|
||||
case 'StorageFolder':
|
||||
return StorageFolderTypeTransformer().decode(value);
|
||||
case 'SyncAckDeleteDto':
|
||||
return SyncAckDeleteDto.fromJson(value);
|
||||
case 'SyncAckDto':
|
||||
|
||||
7
mobile/openapi/lib/api_helper.dart
generated
@@ -127,8 +127,8 @@ String parameterToString(dynamic value) {
if (value is Permission) {
return PermissionTypeTransformer().encode(value).toString();
}
if (value is PluginContext) {
return PluginContextTypeTransformer().encode(value).toString();
if (value is PluginContextType) {
return PluginContextTypeTypeTransformer().encode(value).toString();
}
if (value is PluginTriggerType) {
return PluginTriggerTypeTypeTransformer().encode(value).toString();
@@ -157,9 +157,6 @@ String parameterToString(dynamic value) {
if (value is SourceType) {
return SourceTypeTypeTransformer().encode(value).toString();
}
if (value is StorageFolder) {
return StorageFolderTypeTransformer().encode(value).toString();
}
if (value is SyncEntityType) {
return SyncEntityTypeTypeTransformer().encode(value).toString();
}
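In the hunk above, parameterToString() switches from the old PluginContext branch to the renamed PluginContextType enum and its PluginContextTypeTypeTransformer, so enum-valued query and path parameters keep serializing to their plain wire strings. A minimal sketch of what that means for callers (import path assumed):

import 'package:openapi/api.dart'; // assumed package name for the generated client

void main() {
  // The transformer encodes the enum member back to its underlying value,
  // so the parameter is sent as the bare string 'album'.
  final encoded = parameterToString(PluginContextType.album);
  assert(encoded == 'album');
  print(encoded);
}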
101
mobile/openapi/lib/model/database_backup_delete_dto.dart
generated
@@ -1,101 +0,0 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class DatabaseBackupDeleteDto {
|
||||
/// Returns a new [DatabaseBackupDeleteDto] instance.
|
||||
DatabaseBackupDeleteDto({
|
||||
this.backups = const [],
|
||||
});
|
||||
|
||||
List<String> backups;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is DatabaseBackupDeleteDto &&
|
||||
_deepEquality.equals(other.backups, backups);
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(backups.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'DatabaseBackupDeleteDto[backups=$backups]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'backups'] = this.backups;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [DatabaseBackupDeleteDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static DatabaseBackupDeleteDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "DatabaseBackupDeleteDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return DatabaseBackupDeleteDto(
|
||||
backups: json[r'backups'] is Iterable
|
||||
? (json[r'backups'] as Iterable).cast<String>().toList(growable: false)
|
||||
: const [],
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<DatabaseBackupDeleteDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <DatabaseBackupDeleteDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = DatabaseBackupDeleteDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, DatabaseBackupDeleteDto> mapFromJson(dynamic json) {
|
||||
final map = <String, DatabaseBackupDeleteDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = DatabaseBackupDeleteDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of DatabaseBackupDeleteDto-objects as value to a dart map
|
||||
static Map<String, List<DatabaseBackupDeleteDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<DatabaseBackupDeleteDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = DatabaseBackupDeleteDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'backups',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,101 +0,0 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class DatabaseBackupListResponseDto {
|
||||
/// Returns a new [DatabaseBackupListResponseDto] instance.
|
||||
DatabaseBackupListResponseDto({
|
||||
this.backups = const [],
|
||||
});
|
||||
|
||||
List<String> backups;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is DatabaseBackupListResponseDto &&
|
||||
_deepEquality.equals(other.backups, backups);
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(backups.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'DatabaseBackupListResponseDto[backups=$backups]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'backups'] = this.backups;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [DatabaseBackupListResponseDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static DatabaseBackupListResponseDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "DatabaseBackupListResponseDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return DatabaseBackupListResponseDto(
|
||||
backups: json[r'backups'] is Iterable
|
||||
? (json[r'backups'] as Iterable).cast<String>().toList(growable: false)
|
||||
: const [],
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<DatabaseBackupListResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <DatabaseBackupListResponseDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = DatabaseBackupListResponseDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, DatabaseBackupListResponseDto> mapFromJson(dynamic json) {
|
||||
final map = <String, DatabaseBackupListResponseDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = DatabaseBackupListResponseDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of DatabaseBackupListResponseDto-objects as value to a dart map
|
||||
static Map<String, List<DatabaseBackupListResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<DatabaseBackupListResponseDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = DatabaseBackupListResponseDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'backups',
|
||||
};
|
||||
}
|
||||
|
||||
3
mobile/openapi/lib/model/maintenance_action.dart
generated
@@ -25,13 +25,11 @@ class MaintenanceAction {

static const start = MaintenanceAction._(r'start');
static const end = MaintenanceAction._(r'end');
static const restoreDatabase = MaintenanceAction._(r'restore_database');

/// List of all possible values in this [enum][MaintenanceAction].
static const values = <MaintenanceAction>[
start,
end,
restoreDatabase,
];

static MaintenanceAction? fromJson(dynamic value) => MaintenanceActionTypeTransformer().decode(value);
@@ -72,7 +70,6 @@ class MaintenanceActionTypeTransformer {
switch (data) {
case r'start': return MaintenanceAction.start;
case r'end': return MaintenanceAction.end;
case r'restore_database': return MaintenanceAction.restoreDatabase;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
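Per the hunk counts (-25,13 +25,11 and -72,7 +70,6), this change removes the restore_database member from MaintenanceAction. A small hedged sketch of the decode behavior that remains (import path assumed):

import 'package:openapi/api.dart'; // assumed package name for the generated client

void main() {
  // Known wire values still decode to their enum members...
  assert(MaintenanceAction.fromJson('start') == MaintenanceAction.start);
  // ...while values the enum no longer knows about decode to null, because the
  // transformer's allowNull default swallows unrecognized data instead of throwing.
  assert(MaintenanceAction.fromJson('restore_database') == null);
  print(MaintenanceAction.values);
}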
@@ -1,99 +0,0 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class MaintenanceDetectInstallResponseDto {
|
||||
/// Returns a new [MaintenanceDetectInstallResponseDto] instance.
|
||||
MaintenanceDetectInstallResponseDto({
|
||||
this.storage = const [],
|
||||
});
|
||||
|
||||
List<MaintenanceDetectInstallStorageFolderDto> storage;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is MaintenanceDetectInstallResponseDto &&
|
||||
_deepEquality.equals(other.storage, storage);
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(storage.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'MaintenanceDetectInstallResponseDto[storage=$storage]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'storage'] = this.storage;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [MaintenanceDetectInstallResponseDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static MaintenanceDetectInstallResponseDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "MaintenanceDetectInstallResponseDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return MaintenanceDetectInstallResponseDto(
|
||||
storage: MaintenanceDetectInstallStorageFolderDto.listFromJson(json[r'storage']),
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<MaintenanceDetectInstallResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <MaintenanceDetectInstallResponseDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = MaintenanceDetectInstallResponseDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, MaintenanceDetectInstallResponseDto> mapFromJson(dynamic json) {
|
||||
final map = <String, MaintenanceDetectInstallResponseDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = MaintenanceDetectInstallResponseDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of MaintenanceDetectInstallResponseDto-objects as value to a dart map
|
||||
static Map<String, List<MaintenanceDetectInstallResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<MaintenanceDetectInstallResponseDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = MaintenanceDetectInstallResponseDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'storage',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,123 +0,0 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class MaintenanceDetectInstallStorageFolderDto {
|
||||
/// Returns a new [MaintenanceDetectInstallStorageFolderDto] instance.
|
||||
MaintenanceDetectInstallStorageFolderDto({
|
||||
required this.files,
|
||||
required this.folder,
|
||||
required this.readable,
|
||||
required this.writable,
|
||||
});
|
||||
|
||||
num files;
|
||||
|
||||
StorageFolder folder;
|
||||
|
||||
bool readable;
|
||||
|
||||
bool writable;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is MaintenanceDetectInstallStorageFolderDto &&
|
||||
other.files == files &&
|
||||
other.folder == folder &&
|
||||
other.readable == readable &&
|
||||
other.writable == writable;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(files.hashCode) +
|
||||
(folder.hashCode) +
|
||||
(readable.hashCode) +
|
||||
(writable.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'MaintenanceDetectInstallStorageFolderDto[files=$files, folder=$folder, readable=$readable, writable=$writable]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'files'] = this.files;
|
||||
json[r'folder'] = this.folder;
|
||||
json[r'readable'] = this.readable;
|
||||
json[r'writable'] = this.writable;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [MaintenanceDetectInstallStorageFolderDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static MaintenanceDetectInstallStorageFolderDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "MaintenanceDetectInstallStorageFolderDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return MaintenanceDetectInstallStorageFolderDto(
|
||||
files: num.parse('${json[r'files']}'),
|
||||
folder: StorageFolder.fromJson(json[r'folder'])!,
|
||||
readable: mapValueOfType<bool>(json, r'readable')!,
|
||||
writable: mapValueOfType<bool>(json, r'writable')!,
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<MaintenanceDetectInstallStorageFolderDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <MaintenanceDetectInstallStorageFolderDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = MaintenanceDetectInstallStorageFolderDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, MaintenanceDetectInstallStorageFolderDto> mapFromJson(dynamic json) {
|
||||
final map = <String, MaintenanceDetectInstallStorageFolderDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = MaintenanceDetectInstallStorageFolderDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of MaintenanceDetectInstallStorageFolderDto-objects as value to a dart map
|
||||
static Map<String, List<MaintenanceDetectInstallStorageFolderDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<MaintenanceDetectInstallStorageFolderDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = MaintenanceDetectInstallStorageFolderDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'files',
|
||||
'folder',
|
||||
'readable',
|
||||
'writable',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,158 +0,0 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class MaintenanceStatusResponseDto {
|
||||
/// Returns a new [MaintenanceStatusResponseDto] instance.
|
||||
MaintenanceStatusResponseDto({
|
||||
required this.action,
|
||||
required this.active,
|
||||
this.error,
|
||||
this.progress,
|
||||
this.task,
|
||||
});
|
||||
|
||||
MaintenanceAction action;
|
||||
|
||||
bool active;
|
||||
|
||||
///
|
||||
/// Please note: This property should have been non-nullable! Since the specification file
|
||||
/// does not include a default value (using the "default:" property), however, the generated
|
||||
/// source code must fall back to having a nullable type.
|
||||
/// Consider adding a "default:" property in the specification file to hide this note.
|
||||
///
|
||||
String? error;
|
||||
|
||||
///
|
||||
/// Please note: This property should have been non-nullable! Since the specification file
|
||||
/// does not include a default value (using the "default:" property), however, the generated
|
||||
/// source code must fall back to having a nullable type.
|
||||
/// Consider adding a "default:" property in the specification file to hide this note.
|
||||
///
|
||||
num? progress;
|
||||
|
||||
///
|
||||
/// Please note: This property should have been non-nullable! Since the specification file
|
||||
/// does not include a default value (using the "default:" property), however, the generated
|
||||
/// source code must fall back to having a nullable type.
|
||||
/// Consider adding a "default:" property in the specification file to hide this note.
|
||||
///
|
||||
String? task;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is MaintenanceStatusResponseDto &&
|
||||
other.action == action &&
|
||||
other.active == active &&
|
||||
other.error == error &&
|
||||
other.progress == progress &&
|
||||
other.task == task;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(action.hashCode) +
|
||||
(active.hashCode) +
|
||||
(error == null ? 0 : error!.hashCode) +
|
||||
(progress == null ? 0 : progress!.hashCode) +
|
||||
(task == null ? 0 : task!.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'MaintenanceStatusResponseDto[action=$action, active=$active, error=$error, progress=$progress, task=$task]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'action'] = this.action;
|
||||
json[r'active'] = this.active;
|
||||
if (this.error != null) {
|
||||
json[r'error'] = this.error;
|
||||
} else {
|
||||
// json[r'error'] = null;
|
||||
}
|
||||
if (this.progress != null) {
|
||||
json[r'progress'] = this.progress;
|
||||
} else {
|
||||
// json[r'progress'] = null;
|
||||
}
|
||||
if (this.task != null) {
|
||||
json[r'task'] = this.task;
|
||||
} else {
|
||||
// json[r'task'] = null;
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [MaintenanceStatusResponseDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static MaintenanceStatusResponseDto? fromJson(dynamic value) {
|
||||
upgradeDto(value, "MaintenanceStatusResponseDto");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return MaintenanceStatusResponseDto(
|
||||
action: MaintenanceAction.fromJson(json[r'action'])!,
|
||||
active: mapValueOfType<bool>(json, r'active')!,
|
||||
error: mapValueOfType<String>(json, r'error'),
|
||||
progress: num.parse('${json[r'progress']}'),
|
||||
task: mapValueOfType<String>(json, r'task'),
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<MaintenanceStatusResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <MaintenanceStatusResponseDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = MaintenanceStatusResponseDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, MaintenanceStatusResponseDto> mapFromJson(dynamic json) {
|
||||
final map = <String, MaintenanceStatusResponseDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = MaintenanceStatusResponseDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of MaintenanceStatusResponseDto-objects as value to a dart map
|
||||
static Map<String, List<MaintenanceStatusResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<MaintenanceStatusResponseDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = MaintenanceStatusResponseDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'action',
|
||||
'active',
|
||||
};
|
||||
}
|
||||
|
||||
12
mobile/openapi/lib/model/permission.dart
generated
@@ -58,10 +58,6 @@ class Permission {
static const authPeriodChangePassword = Permission._(r'auth.changePassword');
static const authDevicePeriodDelete = Permission._(r'authDevice.delete');
static const archivePeriodRead = Permission._(r'archive.read');
static const backupPeriodList = Permission._(r'backup.list');
static const backupPeriodDownload = Permission._(r'backup.download');
static const backupPeriodUpload = Permission._(r'backup.upload');
static const backupPeriodDelete = Permission._(r'backup.delete');
static const duplicatePeriodRead = Permission._(r'duplicate.read');
static const duplicatePeriodDelete = Permission._(r'duplicate.delete');
static const facePeriodCreate = Permission._(r'face.create');
@@ -210,10 +206,6 @@ class Permission {
authPeriodChangePassword,
authDevicePeriodDelete,
archivePeriodRead,
backupPeriodList,
backupPeriodDownload,
backupPeriodUpload,
backupPeriodDelete,
duplicatePeriodRead,
duplicatePeriodDelete,
facePeriodCreate,
@@ -397,10 +389,6 @@ class PermissionTypeTransformer {
case r'auth.changePassword': return Permission.authPeriodChangePassword;
case r'authDevice.delete': return Permission.authDevicePeriodDelete;
case r'archive.read': return Permission.archivePeriodRead;
case r'backup.list': return Permission.backupPeriodList;
case r'backup.download': return Permission.backupPeriodDownload;
case r'backup.upload': return Permission.backupPeriodUpload;
case r'backup.delete': return Permission.backupPeriodDelete;
case r'duplicate.read': return Permission.duplicatePeriodRead;
case r'duplicate.delete': return Permission.duplicatePeriodDelete;
case r'face.create': return Permission.facePeriodCreate;
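Analogous to the MaintenanceAction change, the hunk counts here indicate the four backup.* permissions are being removed from the generated Permission enum. A hedged sketch of the round trip for a permission that remains (import path assumed):

import 'package:openapi/api.dart'; // assumed package name for the generated client

void main() {
  // Kept permissions still decode from their wire strings via the transformer...
  final kept = PermissionTypeTransformer().decode('archive.read');
  assert(kept == Permission.archivePeriodRead);
  // ...while the removed backup.* strings now hit the unknown-value branch and
  // decode to null (allowNull defaults to true in the generated transformers).
  final removed = PermissionTypeTransformer().decode('backup.download');
  assert(removed == null);
}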
@@ -32,7 +32,7 @@ class PluginActionResponseDto {
|
||||
|
||||
Object? schema;
|
||||
|
||||
List<PluginContext> supportedContexts;
|
||||
List<PluginContextType> supportedContexts;
|
||||
|
||||
String title;
|
||||
|
||||
@@ -90,7 +90,7 @@ class PluginActionResponseDto {
|
||||
methodName: mapValueOfType<String>(json, r'methodName')!,
|
||||
pluginId: mapValueOfType<String>(json, r'pluginId')!,
|
||||
schema: mapValueOfType<Object>(json, r'schema'),
|
||||
supportedContexts: PluginContext.listFromJson(json[r'supportedContexts']),
|
||||
supportedContexts: PluginContextType.listFromJson(json[r'supportedContexts']),
|
||||
title: mapValueOfType<String>(json, r'title')!,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -11,9 +11,9 @@
|
||||
part of openapi.api;
|
||||
|
||||
|
||||
class PluginContext {
|
||||
class PluginContextType {
|
||||
/// Instantiate a new enum with the provided [value].
|
||||
const PluginContext._(this.value);
|
||||
const PluginContextType._(this.value);
|
||||
|
||||
/// The underlying value of this enum member.
|
||||
final String value;
|
||||
@@ -23,24 +23,24 @@ class PluginContext {
|
||||
|
||||
String toJson() => value;
|
||||
|
||||
static const asset = PluginContext._(r'asset');
|
||||
static const album = PluginContext._(r'album');
|
||||
static const person = PluginContext._(r'person');
|
||||
static const asset = PluginContextType._(r'asset');
|
||||
static const album = PluginContextType._(r'album');
|
||||
static const person = PluginContextType._(r'person');
|
||||
|
||||
/// List of all possible values in this [enum][PluginContext].
|
||||
static const values = <PluginContext>[
|
||||
/// List of all possible values in this [enum][PluginContextType].
|
||||
static const values = <PluginContextType>[
|
||||
asset,
|
||||
album,
|
||||
person,
|
||||
];
|
||||
|
||||
static PluginContext? fromJson(dynamic value) => PluginContextTypeTransformer().decode(value);
|
||||
static PluginContextType? fromJson(dynamic value) => PluginContextTypeTypeTransformer().decode(value);
|
||||
|
||||
static List<PluginContext> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <PluginContext>[];
|
||||
static List<PluginContextType> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <PluginContextType>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = PluginContext.fromJson(row);
|
||||
final value = PluginContextType.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
@@ -50,16 +50,16 @@ class PluginContext {
|
||||
}
|
||||
}
|
||||
|
||||
/// Transformation class that can [encode] an instance of [PluginContext] to String,
|
||||
/// and [decode] dynamic data back to [PluginContext].
|
||||
class PluginContextTypeTransformer {
|
||||
factory PluginContextTypeTransformer() => _instance ??= const PluginContextTypeTransformer._();
|
||||
/// Transformation class that can [encode] an instance of [PluginContextType] to String,
|
||||
/// and [decode] dynamic data back to [PluginContextType].
|
||||
class PluginContextTypeTypeTransformer {
|
||||
factory PluginContextTypeTypeTransformer() => _instance ??= const PluginContextTypeTypeTransformer._();
|
||||
|
||||
const PluginContextTypeTransformer._();
|
||||
const PluginContextTypeTypeTransformer._();
|
||||
|
||||
String encode(PluginContext data) => data.value;
|
||||
String encode(PluginContextType data) => data.value;
|
||||
|
||||
/// Decodes a [dynamic value][data] to a PluginContext.
|
||||
/// Decodes a [dynamic value][data] to a PluginContextType.
|
||||
///
|
||||
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
|
||||
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
|
||||
@@ -67,12 +67,12 @@ class PluginContextTypeTransformer {
|
||||
///
|
||||
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
|
||||
/// and users are still using an old app with the old code.
|
||||
PluginContext? decode(dynamic data, {bool allowNull = true}) {
|
||||
PluginContextType? decode(dynamic data, {bool allowNull = true}) {
|
||||
if (data != null) {
|
||||
switch (data) {
|
||||
case r'asset': return PluginContext.asset;
|
||||
case r'album': return PluginContext.album;
|
||||
case r'person': return PluginContext.person;
|
||||
case r'asset': return PluginContextType.asset;
|
||||
case r'album': return PluginContextType.album;
|
||||
case r'person': return PluginContextType.person;
|
||||
default:
|
||||
if (!allowNull) {
|
||||
throw ArgumentError('Unknown enum value to decode: $data');
|
||||
@@ -82,7 +82,7 @@ class PluginContextTypeTransformer {
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Singleton [PluginContextTypeTransformer] instance.
|
||||
static PluginContextTypeTransformer? _instance;
|
||||
/// Singleton [PluginContextTypeTypeTransformer] instance.
|
||||
static PluginContextTypeTypeTransformer? _instance;
|
||||
}
|
||||
|
||||
@@ -32,7 +32,7 @@ class PluginFilterResponseDto {
|
||||
|
||||
Object? schema;
|
||||
|
||||
List<PluginContext> supportedContexts;
|
||||
List<PluginContextType> supportedContexts;
|
||||
|
||||
String title;
|
||||
|
||||
@@ -90,7 +90,7 @@ class PluginFilterResponseDto {
|
||||
methodName: mapValueOfType<String>(json, r'methodName')!,
|
||||
pluginId: mapValueOfType<String>(json, r'pluginId')!,
|
||||
schema: mapValueOfType<Object>(json, r'schema'),
|
||||
supportedContexts: PluginContext.listFromJson(json[r'supportedContexts']),
|
||||
supportedContexts: PluginContextType.listFromJson(json[r'supportedContexts']),
|
||||
title: mapValueOfType<String>(json, r'title')!,
|
||||
);
|
||||
}
|
||||
|
||||
107
mobile/openapi/lib/model/plugin_trigger_response_dto.dart
generated
Normal file
@@ -0,0 +1,107 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class PluginTriggerResponseDto {
/// Returns a new [PluginTriggerResponseDto] instance.
PluginTriggerResponseDto({
required this.contextType,
required this.type,
});

PluginContextType contextType;

PluginTriggerType type;

@override
bool operator ==(Object other) => identical(this, other) || other is PluginTriggerResponseDto &&
other.contextType == contextType &&
other.type == type;

@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(contextType.hashCode) +
(type.hashCode);

@override
String toString() => 'PluginTriggerResponseDto[contextType=$contextType, type=$type]';

Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'contextType'] = this.contextType;
json[r'type'] = this.type;
return json;
}

/// Returns a new [PluginTriggerResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static PluginTriggerResponseDto? fromJson(dynamic value) {
upgradeDto(value, "PluginTriggerResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();

return PluginTriggerResponseDto(
contextType: PluginContextType.fromJson(json[r'contextType'])!,
type: PluginTriggerType.fromJson(json[r'type'])!,
);
}
return null;
}

static List<PluginTriggerResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <PluginTriggerResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = PluginTriggerResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}

static Map<String, PluginTriggerResponseDto> mapFromJson(dynamic json) {
final map = <String, PluginTriggerResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = PluginTriggerResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}

// maps a json object with a list of PluginTriggerResponseDto-objects as value to a dart map
static Map<String, List<PluginTriggerResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<PluginTriggerResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = PluginTriggerResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}

/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'contextType',
'type',
};
}
@@ -14,41 +14,25 @@ class SetMaintenanceModeDto {
/// Returns a new [SetMaintenanceModeDto] instance.
SetMaintenanceModeDto({
required this.action,
this.restoreBackupFilename,
});

MaintenanceAction action;

///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? restoreBackupFilename;

@override
bool operator ==(Object other) => identical(this, other) || other is SetMaintenanceModeDto &&
other.action == action &&
other.restoreBackupFilename == restoreBackupFilename;
other.action == action;

@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(action.hashCode) +
(restoreBackupFilename == null ? 0 : restoreBackupFilename!.hashCode);
(action.hashCode);

@override
String toString() => 'SetMaintenanceModeDto[action=$action, restoreBackupFilename=$restoreBackupFilename]';
String toString() => 'SetMaintenanceModeDto[action=$action]';

Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'action'] = this.action;
if (this.restoreBackupFilename != null) {
json[r'restoreBackupFilename'] = this.restoreBackupFilename;
} else {
// json[r'restoreBackupFilename'] = null;
}
return json;
}

@@ -62,7 +46,6 @@ class SetMaintenanceModeDto {

return SetMaintenanceModeDto(
action: MaintenanceAction.fromJson(json[r'action'])!,
restoreBackupFilename: mapValueOfType<String>(json, r'restoreBackupFilename'),
);
}
return null;
97 mobile/openapi/lib/model/storage_folder.dart generated
@@ -1,97 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class StorageFolder {
/// Instantiate a new enum with the provided [value].
const StorageFolder._(this.value);

/// The underlying value of this enum member.
final String value;

@override
String toString() => value;

String toJson() => value;

static const encodedVideo = StorageFolder._(r'encoded-video');
static const library_ = StorageFolder._(r'library');
static const upload = StorageFolder._(r'upload');
static const profile = StorageFolder._(r'profile');
static const thumbs = StorageFolder._(r'thumbs');
static const backups = StorageFolder._(r'backups');

/// List of all possible values in this [enum][StorageFolder].
static const values = <StorageFolder>[
encodedVideo,
library_,
upload,
profile,
thumbs,
backups,
];

static StorageFolder? fromJson(dynamic value) => StorageFolderTypeTransformer().decode(value);

static List<StorageFolder> listFromJson(dynamic json, {bool growable = false,}) {
final result = <StorageFolder>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = StorageFolder.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}

/// Transformation class that can [encode] an instance of [StorageFolder] to String,
/// and [decode] dynamic data back to [StorageFolder].
class StorageFolderTypeTransformer {
factory StorageFolderTypeTransformer() => _instance ??= const StorageFolderTypeTransformer._();

const StorageFolderTypeTransformer._();

String encode(StorageFolder data) => data.value;

/// Decodes a [dynamic value][data] to a StorageFolder.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
StorageFolder? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'encoded-video': return StorageFolder.encodedVideo;
case r'library': return StorageFolder.library_;
case r'upload': return StorageFolder.upload;
case r'profile': return StorageFolder.profile;
case r'thumbs': return StorageFolder.thumbs;
case r'backups': return StorageFolder.backups;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}

/// Singleton [StorageFolderTypeTransformer] instance.
static StorageFolderTypeTransformer? _instance;
}
78 mobile/openapi/lib/model/workflow_response_dto.dart generated
@@ -40,7 +40,7 @@ class WorkflowResponseDto {

String ownerId;

WorkflowResponseDtoTriggerTypeEnum triggerType;
PluginTriggerType triggerType;

@override
bool operator ==(Object other) => identical(this, other) || other is WorkflowResponseDto &&
@@ -105,7 +105,7 @@ class WorkflowResponseDto {
id: mapValueOfType<String>(json, r'id')!,
name: mapValueOfType<String>(json, r'name'),
ownerId: mapValueOfType<String>(json, r'ownerId')!,
triggerType: WorkflowResponseDtoTriggerTypeEnum.fromJson(json[r'triggerType'])!,
triggerType: PluginTriggerType.fromJson(json[r'triggerType'])!,
);
}
return null;
@@ -165,77 +165,3 @@ class WorkflowResponseDto {
};
}

class WorkflowResponseDtoTriggerTypeEnum {
/// Instantiate a new enum with the provided [value].
const WorkflowResponseDtoTriggerTypeEnum._(this.value);

/// The underlying value of this enum member.
final String value;

@override
String toString() => value;

String toJson() => value;

static const assetCreate = WorkflowResponseDtoTriggerTypeEnum._(r'AssetCreate');
static const personRecognized = WorkflowResponseDtoTriggerTypeEnum._(r'PersonRecognized');

/// List of all possible values in this [enum][WorkflowResponseDtoTriggerTypeEnum].
static const values = <WorkflowResponseDtoTriggerTypeEnum>[
assetCreate,
personRecognized,
];

static WorkflowResponseDtoTriggerTypeEnum? fromJson(dynamic value) => WorkflowResponseDtoTriggerTypeEnumTypeTransformer().decode(value);

static List<WorkflowResponseDtoTriggerTypeEnum> listFromJson(dynamic json, {bool growable = false,}) {
final result = <WorkflowResponseDtoTriggerTypeEnum>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = WorkflowResponseDtoTriggerTypeEnum.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}

/// Transformation class that can [encode] an instance of [WorkflowResponseDtoTriggerTypeEnum] to String,
/// and [decode] dynamic data back to [WorkflowResponseDtoTriggerTypeEnum].
class WorkflowResponseDtoTriggerTypeEnumTypeTransformer {
factory WorkflowResponseDtoTriggerTypeEnumTypeTransformer() => _instance ??= const WorkflowResponseDtoTriggerTypeEnumTypeTransformer._();

const WorkflowResponseDtoTriggerTypeEnumTypeTransformer._();

String encode(WorkflowResponseDtoTriggerTypeEnum data) => data.value;

/// Decodes a [dynamic value][data] to a WorkflowResponseDtoTriggerTypeEnum.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
WorkflowResponseDtoTriggerTypeEnum? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'AssetCreate': return WorkflowResponseDtoTriggerTypeEnum.assetCreate;
case r'PersonRecognized': return WorkflowResponseDtoTriggerTypeEnum.personRecognized;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}

/// Singleton [WorkflowResponseDtoTriggerTypeEnumTypeTransformer] instance.
static WorkflowResponseDtoTriggerTypeEnumTypeTransformer? _instance;
}
23 mobile/openapi/lib/model/workflow_update_dto.dart generated
@@ -18,6 +18,7 @@ class WorkflowUpdateDto {
this.enabled,
this.filters = const [],
this.name,
this.triggerType,
});

List<WorkflowActionItemDto> actions;
@@ -48,13 +49,22 @@ class WorkflowUpdateDto {
///
String? name;

///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
PluginTriggerType? triggerType;

@override
bool operator ==(Object other) => identical(this, other) || other is WorkflowUpdateDto &&
_deepEquality.equals(other.actions, actions) &&
other.description == description &&
other.enabled == enabled &&
_deepEquality.equals(other.filters, filters) &&
other.name == name;
other.name == name &&
other.triggerType == triggerType;

@override
int get hashCode =>
@@ -63,10 +73,11 @@ class WorkflowUpdateDto {
(description == null ? 0 : description!.hashCode) +
(enabled == null ? 0 : enabled!.hashCode) +
(filters.hashCode) +
(name == null ? 0 : name!.hashCode);
(name == null ? 0 : name!.hashCode) +
(triggerType == null ? 0 : triggerType!.hashCode);

@override
String toString() => 'WorkflowUpdateDto[actions=$actions, description=$description, enabled=$enabled, filters=$filters, name=$name]';
String toString() => 'WorkflowUpdateDto[actions=$actions, description=$description, enabled=$enabled, filters=$filters, name=$name, triggerType=$triggerType]';

Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -87,6 +98,11 @@ class WorkflowUpdateDto {
} else {
// json[r'name'] = null;
}
if (this.triggerType != null) {
json[r'triggerType'] = this.triggerType;
} else {
// json[r'triggerType'] = null;
}
return json;
}

@@ -104,6 +120,7 @@ class WorkflowUpdateDto {
enabled: mapValueOfType<bool>(json, r'enabled'),
filters: WorkflowFilterItemDto.listFromJson(json[r'filters']),
name: mapValueOfType<String>(json, r'name'),
triggerType: PluginTriggerType.fromJson(json[r'triggerType']),
);
}
return null;

@@ -27,6 +27,10 @@ class PlatformAsset {
final int orientation;
final bool isFavorite;

final int? adjustmentTime;
final double? latitude;
final double? longitude;

const PlatformAsset({
required this.id,
required this.name,
@@ -38,6 +42,9 @@ class PlatformAsset {
this.durationInSeconds = 0,
this.orientation = 0,
this.isFavorite = false,
this.adjustmentTime,
this.latitude,
this.longitude,
});
}
5 mobile/test/drift/main/generated/schema.dart generated
@@ -16,6 +16,7 @@ import 'schema_v10.dart' as v10;
import 'schema_v11.dart' as v11;
import 'schema_v12.dart' as v12;
import 'schema_v13.dart' as v13;
import 'schema_v14.dart' as v14;

class GeneratedHelper implements SchemaInstantiationHelper {
@override
@@ -47,10 +48,12 @@ class GeneratedHelper implements SchemaInstantiationHelper {
return v12.DatabaseAtV12(db);
case 13:
return v13.DatabaseAtV13(db);
case 14:
return v14.DatabaseAtV14(db);
default:
throw MissingSchemaException(version, versions);
}
}

static const versions = const [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13];
static const versions = const [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14];
}
7878 mobile/test/drift/main/generated/schema_v14.dart generated Normal file
File diff suppressed because it is too large
@@ -322,237 +322,6 @@
"x-immich-state": "Stable"
}
},
"/admin/database-backups": {
"delete": {
"description": "Delete a backup by its filename",
"operationId": "deleteDatabaseBackup",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/DatabaseBackupDeleteDto"
}
}
},
"required": true
},
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Delete database backup",
"tags": [
"Database Backups (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-permission": "backup.delete",
"x-immich-state": "Alpha"
},
"get": {
"description": "Get the list of the successful and failed backups",
"operationId": "listDatabaseBackups",
"parameters": [],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/DatabaseBackupListResponseDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "List database backups",
"tags": [
"Database Backups (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-permission": "maintenance",
"x-immich-state": "Alpha"
}
},
"/admin/database-backups/start-restore": {
"post": {
"description": "Put Immich into maintenance mode to restore a backup (Immich must not be configured)",
"operationId": "startDatabaseRestoreFlow",
"parameters": [],
"responses": {
"201": {
"description": ""
}
},
"summary": "Start database backup restore flow",
"tags": [
"Database Backups (admin)"
],
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-state": "Alpha"
}
},
"/admin/database-backups/upload": {
"post": {
"description": "Uploads .sql/.sql.gz file to restore backup from",
"operationId": "uploadDatabaseBackup",
"parameters": [],
"requestBody": {
"content": {
"multipart/form-data": {
"schema": {
"$ref": "#/components/schemas/DatabaseBackupUploadDto"
}
}
},
"description": "Backup Upload",
"required": true
},
"responses": {
"201": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Upload database backup",
"tags": [
"Database Backups (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-permission": "backup.upload",
"x-immich-state": "Alpha"
}
},
"/admin/database-backups/{filename}": {
"get": {
"description": "Downloads the database backup file",
"operationId": "downloadDatabaseBackup",
"parameters": [
{
"name": "filename",
"required": true,
"in": "path",
"schema": {
"format": "string",
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/octet-stream": {
"schema": {
"format": "binary",
"type": "string"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Download database backup",
"tags": [
"Database Backups (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.4.0",
"state": "Added"
},
{
"version": "v2.4.0",
"state": "Alpha"
}
],
"x-immich-permission": "backup.download",
"x-immich-state": "Alpha"
}
},
"/admin/maintenance": {
|
||||
"post": {
|
||||
"description": "Put Immich into or take it out of maintenance mode",
|
||||
@@ -603,53 +372,6 @@
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/maintenance/detect-install": {
|
||||
"get": {
|
||||
"description": "Collect integrity checks and other heuristics about local data.",
|
||||
"operationId": "detectPriorInstall",
|
||||
"parameters": [],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/MaintenanceDetectInstallResponseDto"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearer": []
|
||||
},
|
||||
{
|
||||
"cookie": []
|
||||
},
|
||||
{
|
||||
"api_key": []
|
||||
}
|
||||
],
|
||||
"summary": "Detect existing install",
|
||||
"tags": [
|
||||
"Maintenance (admin)"
|
||||
],
|
||||
"x-immich-admin-only": true,
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.4.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.4.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-permission": "maintenance",
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/maintenance/login": {
|
||||
"post": {
|
||||
"description": "Login with maintenance token or cookie to receive current information and perform further actions.",
|
||||
@@ -694,40 +416,6 @@
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/maintenance/status": {
|
||||
"get": {
|
||||
"description": "Fetch information about the currently running maintenance action.",
|
||||
"operationId": "getMaintenanceStatus",
|
||||
"parameters": [],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/MaintenanceStatusResponseDto"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"summary": "Get maintenance mode status",
|
||||
"tags": [
|
||||
"Maintenance (admin)"
|
||||
],
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.4.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.4.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/admin/notifications": {
|
||||
"post": {
|
||||
"description": "Create a new notification for a specific user.",
|
||||
@@ -8332,6 +8020,55 @@
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/plugins/triggers": {
|
||||
"get": {
|
||||
"description": "Retrieve a list of all available plugin triggers.",
|
||||
"operationId": "getPluginTriggers",
|
||||
"parameters": [],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/PluginTriggerResponseDto"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearer": []
|
||||
},
|
||||
{
|
||||
"cookie": []
|
||||
},
|
||||
{
|
||||
"api_key": []
|
||||
}
|
||||
],
|
||||
"summary": "List all plugin triggers",
|
||||
"tags": [
|
||||
"Plugins"
|
||||
],
|
||||
"x-immich-history": [
|
||||
{
|
||||
"version": "v2.3.0",
|
||||
"state": "Added"
|
||||
},
|
||||
{
|
||||
"version": "v2.3.0",
|
||||
"state": "Alpha"
|
||||
}
|
||||
],
|
||||
"x-immich-permission": "plugin.read",
|
||||
"x-immich-state": "Alpha"
|
||||
}
|
||||
},
|
||||
"/plugins/{id}": {
|
||||
"get": {
|
||||
"description": "Retrieve information about a specific plugin by its ID.",
|
||||
@@ -14608,10 +14345,6 @@
|
||||
"name": "Authentication (admin)",
|
||||
"description": "Administrative endpoints related to authentication."
|
||||
},
|
||||
{
|
||||
"name": "Database Backups (admin)",
|
||||
"description": "Manage backups of the Immich database."
|
||||
},
|
||||
{
|
||||
"name": "Deprecated",
|
||||
"description": "Deprecated endpoints that are planned for removal in the next major release."
|
||||
@@ -16549,43 +16282,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"DatabaseBackupDeleteDto": {
|
||||
"properties": {
|
||||
"backups": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"backups"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"DatabaseBackupListResponseDto": {
|
||||
"properties": {
|
||||
"backups": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"backups"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"DatabaseBackupUploadDto": {
|
||||
"properties": {
|
||||
"file": {
|
||||
"format": "binary",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"DownloadArchiveInfo": {
|
||||
"properties": {
|
||||
"assetIds": {
|
||||
@@ -17252,8 +16948,7 @@
|
||||
"MaintenanceAction": {
|
||||
"enum": [
|
||||
"start",
|
||||
"end",
|
||||
"restore_database"
|
||||
"end"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
@@ -17268,47 +16963,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"MaintenanceDetectInstallResponseDto": {
|
||||
"properties": {
|
||||
"storage": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/MaintenanceDetectInstallStorageFolderDto"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"storage"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"MaintenanceDetectInstallStorageFolderDto": {
|
||||
"properties": {
|
||||
"files": {
|
||||
"type": "number"
|
||||
},
|
||||
"folder": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/StorageFolder"
|
||||
}
|
||||
]
|
||||
},
|
||||
"readable": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"writable": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"files",
|
||||
"folder",
|
||||
"readable",
|
||||
"writable"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"MaintenanceLoginDto": {
|
||||
"properties": {
|
||||
"token": {
|
||||
@@ -17317,34 +16971,6 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"MaintenanceStatusResponseDto": {
|
||||
"properties": {
|
||||
"action": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/MaintenanceAction"
|
||||
}
|
||||
]
|
||||
},
|
||||
"active": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"error": {
|
||||
"type": "string"
|
||||
},
|
||||
"progress": {
|
||||
"type": "number"
|
||||
},
|
||||
"task": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"action",
|
||||
"active"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ManualJobName": {
|
||||
"enum": [
|
||||
"person-cleanup",
|
||||
@@ -18285,10 +17911,6 @@
|
||||
"auth.changePassword",
|
||||
"authDevice.delete",
|
||||
"archive.read",
|
||||
"backup.list",
|
||||
"backup.download",
|
||||
"backup.upload",
|
||||
"backup.delete",
|
||||
"duplicate.read",
|
||||
"duplicate.delete",
|
||||
"face.create",
|
||||
@@ -18709,7 +18331,7 @@
|
||||
},
|
||||
"supportedContexts": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/PluginContext"
|
||||
"$ref": "#/components/schemas/PluginContextType"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
@@ -18728,7 +18350,7 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginContext": {
|
||||
"PluginContextType": {
|
||||
"enum": [
|
||||
"asset",
|
||||
"album",
|
||||
@@ -18756,7 +18378,7 @@
|
||||
},
|
||||
"supportedContexts": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/PluginContext"
|
||||
"$ref": "#/components/schemas/PluginContextType"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
@@ -18828,6 +18450,29 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginTriggerResponseDto": {
|
||||
"properties": {
|
||||
"contextType": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/PluginContextType"
|
||||
}
|
||||
]
|
||||
},
|
||||
"type": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/PluginTriggerType"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"contextType",
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginTriggerType": {
|
||||
"enum": [
|
||||
"AssetCreate",
|
||||
@@ -20027,9 +19672,6 @@
|
||||
"$ref": "#/components/schemas/MaintenanceAction"
|
||||
}
|
||||
]
|
||||
},
|
||||
"restoreBackupFilename": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
@@ -20602,17 +20244,6 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"StorageFolder": {
|
||||
"enum": [
|
||||
"encoded-video",
|
||||
"library",
|
||||
"upload",
|
||||
"profile",
|
||||
"thumbs",
|
||||
"backups"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"SyncAckDeleteDto": {
|
||||
"properties": {
|
||||
"types": {
|
||||
@@ -23757,11 +23388,11 @@
|
||||
"type": "string"
|
||||
},
|
||||
"triggerType": {
|
||||
"enum": [
|
||||
"AssetCreate",
|
||||
"PersonRecognized"
|
||||
],
|
||||
"type": "string"
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/PluginTriggerType"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
@@ -23799,6 +23430,13 @@
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"triggerType": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/PluginTriggerType"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
|
||||
@@ -40,27 +40,8 @@ export type ActivityStatisticsResponseDto = {
comments: number;
likes: number;
};
export type DatabaseBackupDeleteDto = {
backups: string[];
};
export type DatabaseBackupListResponseDto = {
backups: string[];
};
export type DatabaseBackupUploadDto = {
file?: Blob;
};
export type SetMaintenanceModeDto = {
action: MaintenanceAction;
restoreBackupFilename?: string;
};
export type MaintenanceDetectInstallStorageFolderDto = {
files: number;
folder: StorageFolder;
readable: boolean;
writable: boolean;
};
export type MaintenanceDetectInstallResponseDto = {
storage: MaintenanceDetectInstallStorageFolderDto[];
};
export type MaintenanceLoginDto = {
token?: string;
@@ -68,13 +49,6 @@ export type MaintenanceLoginDto = {
export type MaintenanceAuthDto = {
username: string;
};
export type MaintenanceStatusResponseDto = {
action: MaintenanceAction;
active: boolean;
error?: string;
progress?: number;
task?: string;
};
export type NotificationCreateDto = {
data?: object;
description?: string | null;
@@ -968,7 +942,7 @@ export type PluginActionResponseDto = {
methodName: string;
pluginId: string;
schema: object | null;
supportedContexts: PluginContext[];
supportedContexts: PluginContextType[];
title: string;
};
export type PluginFilterResponseDto = {
@@ -977,7 +951,7 @@ export type PluginFilterResponseDto = {
methodName: string;
pluginId: string;
schema: object | null;
supportedContexts: PluginContext[];
supportedContexts: PluginContextType[];
title: string;
};
export type PluginResponseDto = {
@@ -992,6 +966,10 @@ export type PluginResponseDto = {
updatedAt: string;
version: string;
};
export type PluginTriggerResponseDto = {
contextType: PluginContextType;
"type": PluginTriggerType;
};
export type QueueResponseDto = {
isPaused: boolean;
name: QueueName;
@@ -1776,7 +1754,7 @@ export type WorkflowResponseDto = {
id: string;
name: string | null;
ownerId: string;
triggerType: TriggerType;
triggerType: PluginTriggerType;
};
export type WorkflowActionItemDto = {
actionConfig?: object;
@@ -1800,6 +1778,7 @@ export type WorkflowUpdateDto = {
enabled?: boolean;
filters?: WorkflowFilterItemDto[];
name?: string;
triggerType?: PluginTriggerType;
};
/**
* List all activities
@@ -1876,63 +1855,6 @@ export function unlinkAllOAuthAccountsAdmin(opts?: Oazapfts.RequestOpts) {
method: "POST"
}));
}
/**
* Delete database backup
*/
export function deleteDatabaseBackup({ databaseBackupDeleteDto }: {
databaseBackupDeleteDto: DatabaseBackupDeleteDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText("/admin/database-backups", oazapfts.json({
...opts,
method: "DELETE",
body: databaseBackupDeleteDto
})));
}
/**
* List database backups
*/
export function listDatabaseBackups(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: DatabaseBackupListResponseDto;
}>("/admin/database-backups", {
...opts
}));
}
/**
* Start database backup restore flow
*/
export function startDatabaseRestoreFlow(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText("/admin/database-backups/start-restore", {
...opts,
method: "POST"
}));
}
/**
* Upload database backup
*/
export function uploadDatabaseBackup({ databaseBackupUploadDto }: {
databaseBackupUploadDto: DatabaseBackupUploadDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText("/admin/database-backups/upload", oazapfts.multipart({
...opts,
method: "POST",
body: databaseBackupUploadDto
})));
}
/**
* Download database backup
*/
export function downloadDatabaseBackup({ filename }: {
filename: string;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchBlob<{
status: 200;
data: Blob;
}>(`/admin/database-backups/${encodeURIComponent(filename)}`, {
...opts
}));
}
/**
* Set maintenance mode
*/
@@ -1945,17 +1867,6 @@ export function setMaintenanceMode({ setMaintenanceModeDto }: {
body: setMaintenanceModeDto
})));
}
/**
* Detect existing install
*/
export function detectPriorInstall(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: MaintenanceDetectInstallResponseDto;
}>("/admin/maintenance/detect-install", {
...opts
}));
}
/**
* Log into maintenance mode
*/
@@ -1971,17 +1882,6 @@ export function maintenanceLogin({ maintenanceLoginDto }: {
body: maintenanceLoginDto
})));
}
/**
* Get maintenance mode status
*/
export function getMaintenanceStatus(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: MaintenanceStatusResponseDto;
}>("/admin/maintenance/status", {
...opts
}));
}
/**
* Create a notification
*/
@@ -3761,6 +3661,17 @@ export function getPlugins(opts?: Oazapfts.RequestOpts) {
...opts
}));
}
/**
* List all plugin triggers
*/
export function getPluginTriggers(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: PluginTriggerResponseDto[];
}>("/plugins/triggers", {
...opts
}));
}
/**
* Retrieve a plugin
*/
@@ -5245,16 +5156,7 @@ export enum UserAvatarColor {
}
export enum MaintenanceAction {
Start = "start",
End = "end",
RestoreDatabase = "restore_database"
}
export enum StorageFolder {
EncodedVideo = "encoded-video",
Library = "library",
Upload = "upload",
Profile = "profile",
Thumbs = "thumbs",
Backups = "backups"
End = "end"
}
export enum NotificationLevel {
Success = "success",
@@ -5348,10 +5250,6 @@ export enum Permission {
AuthChangePassword = "auth.changePassword",
AuthDeviceDelete = "authDevice.delete",
ArchiveRead = "archive.read",
BackupList = "backup.list",
BackupDownload = "backup.download",
BackupUpload = "backup.upload",
BackupDelete = "backup.delete",
DuplicateRead = "duplicate.read",
DuplicateDelete = "duplicate.delete",
FaceCreate = "face.create",
@@ -5536,11 +5434,15 @@ export enum PartnerDirection {
SharedBy = "shared-by",
SharedWith = "shared-with"
}
export enum PluginContext {
export enum PluginContextType {
Asset = "asset",
Album = "album",
Person = "person"
}
export enum PluginTriggerType {
AssetCreate = "AssetCreate",
PersonRecognized = "PersonRecognized"
}
export enum QueueJobStatus {
Active = "active",
Failed = "failed",
@@ -5757,11 +5659,3 @@ export enum OAuthTokenEndpointAuthMethod {
ClientSecretPost = "client_secret_post",
ClientSecretBasic = "client_secret_basic"
}
export enum TriggerType {
AssetCreate = "AssetCreate",
PersonRecognized = "PersonRecognized"
}
export enum PluginTriggerType {
AssetCreate = "AssetCreate",
PersonRecognized = "PersonRecognized"
}
@@ -1,30 +1,36 @@
{
"name": "immich-core",
"version": "2.0.0",
"version": "2.0.1",
"title": "Immich Core",
"description": "Core workflow capabilities for Immich",
"author": "Immich Team",

"wasm": {
"path": "dist/plugin.wasm"
},

"filters": [
{
"methodName": "filterFileName",
"title": "Filter by filename",
"description": "Filter assets by filename pattern using text matching or regular expressions",
"supportedContexts": ["asset"],
"supportedContexts": [
"asset"
],
"schema": {
"type": "object",
"properties": {
"pattern": {
"type": "string",
"title": "Filename pattern",
"description": "Text or regex pattern to match against filename"
},
"matchType": {
"type": "string",
"enum": ["contains", "regex", "exact"],
"title": "Match type",
"enum": [
"contains",
"regex",
"exact"
],
"default": "contains",
"description": "Type of pattern matching to perform"
},
@@ -34,43 +40,57 @@
"description": "Whether matching should be case-sensitive"
}
},
"required": ["pattern"]
"required": [
"pattern"
]
}
},
{
"methodName": "filterFileType",
"title": "Filter by file type",
"description": "Filter assets by file type",
"supportedContexts": ["asset"],
"supportedContexts": [
"asset"
],
"schema": {
"type": "object",
"properties": {
"fileTypes": {
"type": "array",
"title": "File types",
"items": {
"type": "string",
"enum": ["IMAGE", "VIDEO"]
"enum": [
"image",
"video"
]
},
"description": "Allowed file types"
}
},
"required": ["fileTypes"]
"required": [
"fileTypes"
]
}
},
{
"methodName": "filterPerson",
"title": "Filter by person",
"description": "Filter by detected person",
"supportedContexts": ["person"],
"supportedContexts": [
"person"
],
"schema": {
"type": "object",
"properties": {
"personIds": {
"type": "array",
"title": "Person IDs",
"items": {
"type": "string"
},
"description": "List of person to match"
"description": "List of person to match",
"subType": "people-picker"
},
"matchAny": {
"type": "boolean",
@@ -78,24 +98,29 @@
"description": "Match any name (true) or require all names (false)"
}
},
"required": ["personIds"]
"required": [
"personIds"
]
}
}
],

"actions": [
{
"methodName": "actionArchive",
"title": "Archive",
"description": "Move the asset to archive",
"supportedContexts": ["asset"],
"supportedContexts": [
"asset"
],
"schema": {}
},
{
"methodName": "actionFavorite",
"title": "Favorite",
"description": "Mark the asset as favorite or unfavorite",
"supportedContexts": ["asset"],
"supportedContexts": [
"asset"
],
"schema": {
"type": "object",
"properties": {
@@ -111,16 +136,23 @@
"methodName": "actionAddToAlbum",
"title": "Add to Album",
"description": "Add the item to a specified album",
"supportedContexts": ["asset", "person"],
"supportedContexts": [
"asset",
"person"
],
"schema": {
"type": "object",
"properties": {
"albumId": {
"type": "string",
"description": "Target album ID"
"title": "Album ID",
"description": "Target album ID",
"subType": "album-picker"
}
},
"required": ["albumId"]
"required": [
"albumId"
]
}
}
]
724 pnpm-lock.yaml generated
File diff suppressed because it is too large
@@ -153,7 +153,7 @@
"mock-fs": "^5.2.0",
"node-gyp": "^12.0.0",
"pngjs": "^7.0.0",
"prettier": "^3.0.2",
"prettier": "^3.7.4",
"prettier-plugin-organize-imports": "^4.0.0",
"sql-formatter": "^15.0.0",
"supertest": "^7.1.0",
@@ -21,11 +21,8 @@ import { LoggingInterceptor } from 'src/middleware/logging.interceptor';
import { repositories } from 'src/repositories';
import { AppRepository } from 'src/repositories/app.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { teardownTelemetry, TelemetryRepository } from 'src/repositories/telemetry.repository';
import { WebsocketRepository } from 'src/repositories/websocket.repository';
@@ -106,9 +103,6 @@ export class ApiModule extends BaseModule {}
providers: [
ConfigRepository,
LoggingRepository,
StorageRepository,
ProcessRepository,
DatabaseRepository,
SystemMetadataRepository,
AppRepository,
MaintenanceWebsocketRepository,
@@ -122,14 +116,9 @@ export class MaintenanceModule {
constructor(
@Inject(IWorker) private worker: ImmichWorker,
logger: LoggingRepository,
private maintenanceWorkerService: MaintenanceWorkerService,
) {
logger.setAppName(this.worker);
}

async onModuleInit() {
await this.maintenanceWorkerService.init();
}
}

@Module({

@@ -141,7 +141,6 @@ export const endpointTags: Record<ApiTag, string> = {
[ApiTag.Assets]: 'An asset is an image or video that has been uploaded to Immich.',
[ApiTag.Authentication]: 'Endpoints related to user authentication, including OAuth.',
[ApiTag.AuthenticationAdmin]: 'Administrative endpoints related to authentication.',
[ApiTag.DatabaseBackups]: 'Manage backups of the Immich database.',
[ApiTag.Deprecated]: 'Deprecated endpoints that are planned for removal in the next major release.',
[ApiTag.Download]: 'Endpoints for downloading assets or collections of assets.',
[ApiTag.Duplicates]: 'Endpoints for managing and identifying duplicate assets.',
@@ -1,101 +0,0 @@
import { Body, Controller, Delete, Get, Next, Param, Post, Res, UploadedFile, UseInterceptors } from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';
import { ApiBody, ApiConsumes, ApiTags } from '@nestjs/swagger';
import { NextFunction, Response } from 'express';
import { Endpoint, HistoryBuilder } from 'src/decorators';
import {
DatabaseBackupDeleteDto,
DatabaseBackupListResponseDto,
DatabaseBackupUploadDto,
} from 'src/dtos/database-backup.dto';
import { ApiTag, ImmichCookie, Permission } from 'src/enum';
import { Authenticated, FileResponse, GetLoginDetails } from 'src/middleware/auth.guard';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { LoginDetails } from 'src/services/auth.service';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { MaintenanceService } from 'src/services/maintenance.service';
import { sendFile } from 'src/utils/file';
import { respondWithCookie } from 'src/utils/response';
import { FilenameParamDto } from 'src/validation';

@ApiTags(ApiTag.DatabaseBackups)
@Controller('admin/database-backups')
export class DatabaseBackupController {
constructor(
private logger: LoggingRepository,
private service: DatabaseBackupService,
private maintenanceService: MaintenanceService,
) {}

@Get()
@Endpoint({
summary: 'List database backups',
description: 'Get the list of the successful and failed backups',
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
})
@Authenticated({ permission: Permission.Maintenance, admin: true })
listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
return this.service.listBackups();
}

@Get(':filename')
@FileResponse()
@Endpoint({
summary: 'Download database backup',
description: 'Downloads the database backup file',
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
})
@Authenticated({ permission: Permission.BackupDownload, admin: true })
async downloadDatabaseBackup(
@Param() { filename }: FilenameParamDto,
@Res() res: Response,
@Next() next: NextFunction,
): Promise<void> {
await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
}

@Delete()
@Endpoint({
summary: 'Delete database backup',
description: 'Delete a backup by its filename',
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
})
@Authenticated({ permission: Permission.BackupDelete, admin: true })
async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
return this.service.deleteBackup(dto.backups);
}

@Post('start-restore')
@Endpoint({
summary: 'Start database backup restore flow',
description: 'Put Immich into maintenance mode to restore a backup (Immich must not be configured)',
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
})
async startDatabaseRestoreFlow(
@GetLoginDetails() loginDetails: LoginDetails,
@Res({ passthrough: true }) res: Response,
): Promise<void> {
const { jwt } = await this.maintenanceService.startRestoreFlow();
return respondWithCookie(res, undefined, {
isSecure: loginDetails.isSecure,
values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],
});
}

@Post('upload')
@Authenticated({ permission: Permission.BackupUpload, admin: true })
@ApiConsumes('multipart/form-data')
@ApiBody({ description: 'Backup Upload', type: DatabaseBackupUploadDto })
@Endpoint({
summary: 'Upload database backup',
description: 'Uploads .sql/.sql.gz file to restore backup from',
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
})
@UseInterceptors(FileInterceptor('file'))
uploadDatabaseBackup(
@UploadedFile()
file: Express.Multer.File,
): Promise<void> {
return this.service.uploadBackup(file);
}
}
@@ -6,7 +6,6 @@ import { AssetMediaController } from 'src/controllers/asset-media.controller';
import { AssetController } from 'src/controllers/asset.controller';
import { AuthAdminController } from 'src/controllers/auth-admin.controller';
import { AuthController } from 'src/controllers/auth.controller';
import { DatabaseBackupController } from 'src/controllers/database-backup.controller';
import { DownloadController } from 'src/controllers/download.controller';
import { DuplicateController } from 'src/controllers/duplicate.controller';
import { FaceController } from 'src/controllers/face.controller';
@@ -47,7 +46,6 @@ export const controllers = [
AssetMediaController,
AuthController,
AuthAdminController,
DatabaseBackupController,
DownloadController,
DuplicateController,
FaceController,

@@ -1,15 +1,9 @@
import { BadRequestException, Body, Controller, Get, Post, Res } from '@nestjs/common';
import { BadRequestException, Body, Controller, Post, Res } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { Response } from 'express';
import { Endpoint, HistoryBuilder } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import {
MaintenanceAuthDto,
MaintenanceDetectInstallResponseDto,
MaintenanceLoginDto,
MaintenanceStatusResponseDto,
SetMaintenanceModeDto,
} from 'src/dtos/maintenance.dto';
import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
import { ApiTag, ImmichCookie, MaintenanceAction, Permission } from 'src/enum';
import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
import { LoginDetails } from 'src/services/auth.service';
@@ -21,27 +15,6 @@ import { respondWithCookie } from 'src/utils/response';
export class MaintenanceController {
constructor(private service: MaintenanceService) {}

@Get('status')
@Endpoint({
summary: 'Get maintenance mode status',
description: 'Fetch information about the currently running maintenance action.',
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
})
getMaintenanceStatus(): MaintenanceStatusResponseDto {
return this.service.getMaintenanceStatus();
}

@Get('detect-install')
@Endpoint({
summary: 'Detect existing install',
description: 'Collect integrity checks and other heuristics about local data.',
history: new HistoryBuilder().added('v2.4.0').alpha('v2.4.0'),
})
@Authenticated({ permission: Permission.Maintenance, admin: true })
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
return this.service.detectPriorInstall();
}

@Post('login')
@Endpoint({
summary: 'Log into maintenance mode',
@@ -65,8 +38,8 @@ export class MaintenanceController {
@GetLoginDetails() loginDetails: LoginDetails,
@Res({ passthrough: true }) res: Response,
): Promise<void> {
if (dto.action !== MaintenanceAction.End) {
const { jwt } = await this.service.startMaintenance(dto, auth.user.name);
if (dto.action === MaintenanceAction.Start) {
const { jwt } = await this.service.startMaintenance(auth.user.name);
return respondWithCookie(res, undefined, {
isSecure: loginDetails.isSecure,
values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],
@@ -1,7 +1,7 @@
import { Controller, Get, Param } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { Endpoint, HistoryBuilder } from 'src/decorators';
import { PluginResponseDto } from 'src/dtos/plugin.dto';
import { PluginResponseDto, PluginTriggerResponseDto } from 'src/dtos/plugin.dto';
import { Permission } from 'src/enum';
import { Authenticated } from 'src/middleware/auth.guard';
import { PluginService } from 'src/services/plugin.service';
@@ -12,6 +12,17 @@ import { UUIDParamDto } from 'src/validation';
export class PluginController {
constructor(private service: PluginService) {}

@Get('triggers')
@Authenticated({ permission: Permission.PluginRead })
@Endpoint({
summary: 'List all plugin triggers',
description: 'Retrieve a list of all available plugin triggers.',
history: new HistoryBuilder().added('v2.3.0').alpha('v2.3.0'),
})
getPluginTriggers(): PluginTriggerResponseDto[] {
return this.service.getTriggers();
}

@Get()
@Authenticated({ permission: Permission.PluginRead })
@Endpoint({

@@ -1,16 +0,0 @@
import { ApiProperty } from '@nestjs/swagger';
import { IsString } from 'class-validator';

export class DatabaseBackupListResponseDto {
backups!: string[];
}

export class DatabaseBackupUploadDto {
@ApiProperty({ type: 'string', format: 'binary', required: false })
file?: any;
}

export class DatabaseBackupDeleteDto {
@IsString({ each: true })
backups!: string[];
}
@@ -1,12 +1,9 @@
import { MaintenanceAction, StorageFolder } from 'src/enum';
import { MaintenanceAction } from 'src/enum';
import { ValidateEnum, ValidateString } from 'src/validation';

export class SetMaintenanceModeDto {
@ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
action!: MaintenanceAction;

@ValidateString({ optional: true })
restoreBackupFilename?: string;
}

export class MaintenanceLoginDto {
@@ -17,26 +14,3 @@ export class MaintenanceLoginDto {
export class MaintenanceAuthDto {
username!: string;
}

export class MaintenanceStatusResponseDto {
active!: boolean;

@ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
action!: MaintenanceAction;

progress?: number;
task?: string;
error?: string;
}

export class MaintenanceDetectInstallStorageFolderDto {
@ValidateEnum({ enum: StorageFolder, name: 'StorageFolder' })
folder!: StorageFolder;
readable!: boolean;
writable!: boolean;
files!: number;
}

export class MaintenanceDetectInstallResponseDto {
storage!: MaintenanceDetectInstallStorageFolderDto[];
}
@@ -1,9 +1,16 @@
import { IsNotEmpty, IsString } from 'class-validator';
import { PluginAction, PluginFilter } from 'src/database';
import { PluginContext } from 'src/enum';
import { PluginContext as PluginContextType, PluginTriggerType } from 'src/enum';
import type { JSONSchema } from 'src/types/plugin-schema.types';
import { ValidateEnum } from 'src/validation';

export class PluginTriggerResponseDto {
@ValidateEnum({ enum: PluginTriggerType, name: 'PluginTriggerType' })
type!: PluginTriggerType;
@ValidateEnum({ enum: PluginContextType, name: 'PluginContextType' })
contextType!: PluginContextType;
}

export class PluginResponseDto {
id!: string;
name!: string;
@@ -24,8 +31,8 @@ export class PluginFilterResponseDto {
title!: string;
description!: string;

@ValidateEnum({ enum: PluginContext, name: 'PluginContext' })
supportedContexts!: PluginContext[];
@ValidateEnum({ enum: PluginContextType, name: 'PluginContextType' })
supportedContexts!: PluginContextType[];
schema!: JSONSchema | null;
}

@@ -36,8 +43,8 @@ export class PluginActionResponseDto {
title!: string;
description!: string;

@ValidateEnum({ enum: PluginContext, name: 'PluginContext' })
supportedContexts!: PluginContext[];
@ValidateEnum({ enum: PluginContextType, name: 'PluginContextType' })
supportedContexts!: PluginContextType[];
schema!: JSONSchema | null;
}

@@ -48,6 +48,9 @@ export class WorkflowCreateDto {
}

export class WorkflowUpdateDto {
@ValidateEnum({ enum: PluginTriggerType, name: 'PluginTriggerType', optional: true })
triggerType?: PluginTriggerType;

@IsString()
@IsNotEmpty()
@Optional()
@@ -74,6 +77,7 @@ export class WorkflowUpdateDto {
export class WorkflowResponseDto {
id!: string;
ownerId!: string;
@ValidateEnum({ enum: PluginTriggerType, name: 'PluginTriggerType' })
triggerType!: PluginTriggerType;
name!: string | null;
description!: string;

@@ -128,11 +128,6 @@ export enum Permission {

ArchiveRead = 'archive.read',

BackupList = 'backup.list',
BackupDownload = 'backup.download',
BackupUpload = 'backup.upload',
BackupDelete = 'backup.delete',

DuplicateRead = 'duplicate.read',
DuplicateDelete = 'duplicate.delete',

@@ -684,14 +679,12 @@ export enum DatabaseLock {
MediaLocation = 700,
GetSystemConfig = 69,
BackupDatabase = 42,
MaintenanceOperation = 621,
MemoryCreation = 777,
}

export enum MaintenanceAction {
Start = 'start',
End = 'end',
RestoreDatabase = 'restore_database',
}

export enum ExitCode {
@@ -838,7 +831,6 @@ export enum ApiTag {
Authentication = 'Authentication',
AuthenticationAdmin = 'Authentication (admin)',
Assets = 'Assets',
DatabaseBackups = 'Database Backups (admin)',
Deprecated = 'Deprecated',
Download = 'Download',
Duplicates = 'Duplicates',

@@ -1,11 +1,11 @@
import { Kysely, sql } from 'kysely';
import { Kysely } from 'kysely';
import { CommandFactory } from 'nest-commander';
import { ChildProcess, fork } from 'node:child_process';
import { dirname, join } from 'node:path';
import { Worker } from 'node:worker_threads';
import { PostgresError } from 'postgres';
import { ImmichAdminModule } from 'src/app.module';
import { DatabaseLock, ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
import { ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { type DB } from 'src/schema';
@@ -35,14 +35,16 @@ class Workers {
if (isMaintenanceMode) {
this.startWorker(ImmichWorker.Maintenance);
} else {
await this.waitForFreeLock();

for (const worker of workers) {
this.startWorker(worker);
}
}
}

/**
* Initialise a short-lived Nest application to build configuration
* @returns System configuration
*/
private async isMaintenanceMode(): Promise<boolean> {
const { database } = new ConfigRepository().getEnv();
const kysely = new Kysely<DB>(getKyselyConfig(database.config));
@@ -63,32 +65,6 @@ class Workers {
}
}

private async waitForFreeLock() {
const { database } = new ConfigRepository().getEnv();
const kysely = new Kysely<DB>(getKyselyConfig(database.config));

let locked = false;
while (!locked) {
locked = await kysely.connection().execute(async (conn) => {
const { rows } = await sql<{
pg_try_advisory_lock: boolean;
}>`SELECT pg_try_advisory_lock(${DatabaseLock.MaintenanceOperation})`.execute(conn);

const isLocked = rows[0].pg_try_advisory_lock;

if (isLocked) {
await sql`SELECT pg_advisory_unlock(${DatabaseLock.MaintenanceOperation})`.execute(conn);
}

return isLocked;
});

await new Promise((resolve) => setTimeout(resolve, 1000));
}

await kysely.destroy();
}

/**
* Start an individual worker
* @param name Worker

@@ -7,24 +7,17 @@ import {
WebSocketServer,
} from '@nestjs/websockets';
import { Server, Socket } from 'socket.io';
import { MaintenanceAuthDto, MaintenanceStatusResponseDto } from 'src/dtos/maintenance.dto';
import { AppRepository } from 'src/repositories/app.repository';
import { AppRestartEvent } from 'src/repositories/event.repository';
import { AppRestartEvent, ArgsOf } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';

interface ServerEventMap {
AppRestart: [AppRestartEvent];
MaintenanceStatus: [MaintenanceStatusResponseDto];
}
export const serverEvents = ['AppRestart'] as const;
export type ServerEvents = (typeof serverEvents)[number];

interface ClientEventMap {
export interface ClientEventMap {
AppRestartV1: [AppRestartEvent];
MaintenanceStatusV1: [MaintenanceStatusResponseDto];
}

type AuthFn = (client: Socket) => Promise<MaintenanceAuthDto>;
type StatusUpdateFn = (status: MaintenanceStatusResponseDto) => void;

@WebSocketGateway({
cors: true,
path: '/api/socket.io',
@@ -32,11 +25,8 @@ type StatusUpdateFn = (status: MaintenanceStatusResponseDto) => void;
})
@Injectable()
export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGatewayDisconnect, OnGatewayInit {
private authFn?: AuthFn;
private statusUpdateFn?: StatusUpdateFn;

@WebSocketServer()
private server?: Server;
private websocketServer?: Server;

constructor(
private logger: LoggingRepository,
@@ -45,46 +35,25 @@ export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGa
this.logger.setContext(MaintenanceWebsocketRepository.name);
}

afterInit(server: Server) {
afterInit(websocketServer: Server) {
this.logger.log('Initialized websocket server');
server.on('AppRestart', () => this.appRepository.exitApp());
server.on('MaintenanceStatus', (status) => this.statusUpdateFn?.(status));
}

clientSend<T extends keyof ClientEventMap>(event: T, room: string, ...data: ClientEventMap[T]) {
this.server?.to(room).emit(event, ...data);
websocketServer.on('AppRestart', () => this.appRepository.exitApp());
}

clientBroadcast<T extends keyof ClientEventMap>(event: T, ...data: ClientEventMap[T]) {
this.server?.emit(event, ...data);
this.websocketServer?.emit(event, ...data);
}

serverSend<T extends keyof ServerEventMap>(event: T, ...args: ServerEventMap[T]): void {
serverSend<T extends ServerEvents>(event: T, ...args: ArgsOf<T>): void {
this.logger.debug(`Server event: ${event} (send)`);
this.server?.serverSideEmit(event, ...args);
this.websocketServer?.serverSideEmit(event, ...args);
}

async handleConnection(client: Socket) {
try {
await this.authFn!(client);
await client.join('private');
this.logger.log(`Websocket Connect: ${client.id} (private)`);
} catch {
await client.join('public');
this.logger.log(`Websocket Connect: ${client.id} (public)`);
}
handleConnection(client: Socket) {
this.logger.log(`Websocket Connect: ${client.id}`);
}

async handleDisconnect(client: Socket) {
handleDisconnect(client: Socket) {
this.logger.log(`Websocket Disconnect: ${client.id}`);
await Promise.allSettled([client.leave('private'), client.leave('public')]);
}

setAuthFn(fn: (client: Socket) => Promise<MaintenanceAuthDto>) {
this.authFn = fn;
}

setStatusUpdateFn(fn: (status: MaintenanceStatusResponseDto) => void) {
this.statusUpdateFn = fn;
}
}

@@ -1,109 +1,23 @@
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
Delete,
|
||||
Get,
|
||||
Next,
|
||||
Param,
|
||||
Post,
|
||||
Req,
|
||||
Res,
|
||||
UploadedFile,
|
||||
UseInterceptors,
|
||||
} from '@nestjs/common';
|
||||
import { FileInterceptor } from '@nestjs/platform-express';
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import {
|
||||
MaintenanceAuthDto,
|
||||
MaintenanceDetectInstallResponseDto,
|
||||
MaintenanceLoginDto,
|
||||
MaintenanceStatusResponseDto,
|
||||
SetMaintenanceModeDto,
|
||||
} from 'src/dtos/maintenance.dto';
|
||||
import { Body, Controller, Get, Post, Req, Res } from '@nestjs/common';
|
||||
import { Request, Response } from 'express';
|
||||
import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
|
||||
import { ServerConfigDto } from 'src/dtos/server.dto';
|
||||
import { ImmichCookie } from 'src/enum';
|
||||
import { ImmichCookie, MaintenanceAction } from 'src/enum';
|
||||
import { MaintenanceRoute } from 'src/maintenance/maintenance-auth.guard';
|
||||
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
|
||||
import { GetLoginDetails } from 'src/middleware/auth.guard';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { LoginDetails } from 'src/services/auth.service';
|
||||
import { sendFile } from 'src/utils/file';
|
||||
import { respondWithCookie } from 'src/utils/response';
|
||||
import { FilenameParamDto } from 'src/validation';
|
||||
|
||||
import type { DatabaseBackupController as _DatabaseBackupController } from 'src/controllers/database-backup.controller';
|
||||
import type { ServerController as _ServerController } from 'src/controllers/server.controller';
|
||||
import { DatabaseBackupDeleteDto, DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
|
||||
|
||||
@Controller()
|
||||
export class MaintenanceWorkerController {
|
||||
constructor(
|
||||
private logger: LoggingRepository,
|
||||
private service: MaintenanceWorkerService,
|
||||
) {}
|
||||
constructor(private service: MaintenanceWorkerService) {}
|
||||
|
||||
/**
|
||||
* {@link _ServerController.getServerConfig }
|
||||
*/
|
||||
@Get('server/config')
|
||||
getServerConfig(): ServerConfigDto {
|
||||
getServerConfig(): Promise<ServerConfigDto> {
|
||||
return this.service.getSystemConfig();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.listDatabaseBackups}
|
||||
*/
|
||||
@Get('admin/database-backups')
|
||||
@MaintenanceRoute()
|
||||
listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
|
||||
return this.service.listBackups();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.downloadDatabaseBackup}
|
||||
*/
|
||||
@Get('admin/database-backups/:filename')
|
||||
@MaintenanceRoute()
|
||||
async downloadDatabaseBackup(
|
||||
@Param() { filename }: FilenameParamDto,
|
||||
@Res() res: Response,
|
||||
@Next() next: NextFunction,
|
||||
) {
|
||||
await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.deleteDatabaseBackup}
|
||||
*/
|
||||
@Delete('admin/database-backups')
|
||||
@MaintenanceRoute()
|
||||
async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
|
||||
return this.service.deleteBackup(dto.backups);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupController.uploadDatabaseBackup}
|
||||
*/
|
||||
@Post('admin/database-backups/upload')
|
||||
@MaintenanceRoute()
|
||||
@UseInterceptors(FileInterceptor('file'))
|
||||
uploadDatabaseBackup(
|
||||
@UploadedFile()
|
||||
file: Express.Multer.File,
|
||||
): Promise<void> {
|
||||
return this.service.uploadBackup(file);
|
||||
}
|
||||
|
||||
@Get('admin/maintenance/status')
|
||||
maintenanceStatus(@Req() request: Request): Promise<MaintenanceStatusResponseDto> {
|
||||
return this.service.status(request.cookies[ImmichCookie.MaintenanceToken]);
|
||||
}
|
||||
|
||||
@Get('admin/maintenance/detect-install')
|
||||
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
|
||||
return this.service.detectPriorInstall();
|
||||
}
|
||||
|
||||
@Post('admin/maintenance/login')
|
||||
async maintenanceLogin(
|
||||
@Req() request: Request,
|
||||
@@ -121,7 +35,9 @@ export class MaintenanceWorkerController {
|
||||
|
||||
@Post('admin/maintenance')
|
||||
@MaintenanceRoute()
|
||||
setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): void {
|
||||
void this.service.setAction(dto);
|
||||
async setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): Promise<void> {
|
||||
if (dto.action === MaintenanceAction.End) {
|
||||
await this.service.endMaintenance();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,44 +1,25 @@
|
||||
import { BadRequestException, UnauthorizedException } from '@nestjs/common';
|
||||
import { UnauthorizedException } from '@nestjs/common';
|
||||
import { SignJWT } from 'jose';
|
||||
import { DateTime } from 'luxon';
|
||||
import { PassThrough, Readable } from 'node:stream';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { MaintenanceAction, StorageFolder, SystemMetadataKey } from 'src/enum';
|
||||
import { SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
|
||||
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
|
||||
import { automock, AutoMocked, getMocks, mockDuplex, mockSpawn, ServiceMocks } from 'test/utils';
|
||||
|
||||
function* mockData() {
|
||||
yield '';
|
||||
}
|
||||
import { automock, getMocks, ServiceMocks } from 'test/utils';
|
||||
|
||||
describe(MaintenanceWorkerService.name, () => {
|
||||
let sut: MaintenanceWorkerService;
|
||||
let mocks: ServiceMocks;
|
||||
let maintenanceWebsocketRepositoryMock: AutoMocked<MaintenanceWebsocketRepository>;
|
||||
let maintenanceWorkerRepositoryMock: MaintenanceWebsocketRepository;
|
||||
|
||||
beforeEach(() => {
|
||||
mocks = getMocks();
|
||||
maintenanceWebsocketRepositoryMock = automock(MaintenanceWebsocketRepository, {
|
||||
args: [mocks.logger],
|
||||
strict: false,
|
||||
});
|
||||
|
||||
maintenanceWorkerRepositoryMock = automock(MaintenanceWebsocketRepository, { args: [mocks.logger], strict: false });
|
||||
sut = new MaintenanceWorkerService(
|
||||
mocks.logger as never,
|
||||
mocks.app,
|
||||
mocks.config,
|
||||
mocks.systemMetadata as never,
|
||||
maintenanceWebsocketRepositoryMock,
|
||||
mocks.storage as never,
|
||||
mocks.process,
|
||||
mocks.database as never,
|
||||
maintenanceWorkerRepositoryMock,
|
||||
);
|
||||
|
||||
sut.mock({
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
});
|
||||
});
|
||||
|
||||
it('should work', () => {
|
||||
@@ -46,43 +27,14 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
|
||||
describe('getSystemConfig', () => {
|
||||
it('should respond the server is in maintenance mode', () => {
|
||||
expect(sut.getSystemConfig()).toMatchObject(
|
||||
it('should respond the server is in maintenance mode', async () => {
|
||||
await expect(sut.getSystemConfig()).resolves.toMatchObject(
|
||||
expect.objectContaining({
|
||||
maintenanceMode: true,
|
||||
}),
|
||||
);
|
||||
|
||||
expect(mocks.systemMetadata.get).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe.skip('ssr');
|
||||
describe.skip('detectMediaLocation');
|
||||
|
||||
describe('setStatus', () => {
|
||||
it('should broadcast status', () => {
|
||||
sut.setStatus({
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
task: 'abc',
|
||||
error: 'def',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalled();
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledTimes(2);
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: 'start',
|
||||
task: 'abc',
|
||||
error: 'def',
|
||||
});
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'public', {
|
||||
active: true,
|
||||
action: 'start',
|
||||
task: 'abc',
|
||||
error: 'Something went wrong, see logs!',
|
||||
});
|
||||
expect(mocks.systemMetadata.get).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -90,14 +42,7 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;
|
||||
|
||||
it('should log a valid login URL', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
await expect(sut.logSecret()).resolves.toBeUndefined();
|
||||
expect(mocks.logger.log).toHaveBeenCalledWith(expect.stringMatching(RE_LOGIN_URL));
|
||||
|
||||
@@ -118,13 +63,7 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
|
||||
it('should parse cookie properly', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
|
||||
await expect(
|
||||
sut.authenticate({
|
||||
@@ -134,102 +73,13 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('status', () => {
|
||||
beforeEach(() => {
|
||||
sut.mock({
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
error: 'secret value!',
|
||||
});
|
||||
});
|
||||
|
||||
it('generates private status', async () => {
|
||||
const jwt = await new SignJWT({ _mockValue: true })
|
||||
.setProtectedHeader({ alg: 'HS256' })
|
||||
.setIssuedAt()
|
||||
.setExpirationTime('4h')
|
||||
.sign(new TextEncoder().encode('secret'));
|
||||
|
||||
await expect(sut.status(jwt)).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
error: 'secret value!',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('generates public status', async () => {
|
||||
await expect(sut.status()).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
error: 'Something went wrong, see logs!',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('integrityCheck', () => {
|
||||
it('generate integrity report', async () => {
|
||||
mocks.storage.readdir.mockResolvedValue(['.immich', 'file1', 'file2']);
|
||||
mocks.storage.readFile.mockResolvedValue(undefined as never);
|
||||
mocks.storage.overwriteFile.mockRejectedValue(undefined as never);
|
||||
|
||||
await expect(sut.detectPriorInstall()).resolves.toMatchInlineSnapshot(`
|
||||
{
|
||||
"storage": [
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "encoded-video",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "library",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "upload",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "profile",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "thumbs",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
{
|
||||
"files": 2,
|
||||
"folder": "backups",
|
||||
"readable": true,
|
||||
"writable": false,
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('login', () => {
|
||||
it('should fail without token', async () => {
|
||||
await expect(sut.login()).rejects.toThrowError(new UnauthorizedException('Missing JWT Token'));
|
||||
});
|
||||
|
||||
it('should fail with expired JWT', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
|
||||
const jwt = await new SignJWT({})
|
||||
.setProtectedHeader({ alg: 'HS256' })
|
||||
@@ -241,13 +91,7 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
|
||||
it('should succeed with valid JWT', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
|
||||
|
||||
const jwt = await new SignJWT({ _mockValue: true })
|
||||
.setProtectedHeader({ alg: 'HS256' })
|
||||
@@ -263,232 +107,22 @@ describe(MaintenanceWorkerService.name, () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe.skip('setAction'); // just calls setStatus+runAction
|
||||
|
||||
/**
|
||||
* Actions
|
||||
*/
|
||||
|
||||
describe('action: start', () => {
|
||||
it('should not do anything', async () => {
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.Start,
|
||||
});
|
||||
|
||||
expect(mocks.logger.log).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('action: end', () => {
|
||||
describe('endMaintenance', () => {
|
||||
it('should set maintenance mode', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: false });
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.End,
|
||||
});
|
||||
await expect(sut.endMaintenance()).resolves.toBeUndefined();
|
||||
|
||||
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: false,
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
|
||||
expect(maintenanceWorkerRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
|
||||
isMaintenanceMode: false,
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
|
||||
expect(maintenanceWorkerRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
|
||||
isMaintenanceMode: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('action: restore database', () => {
|
||||
beforeEach(() => {
|
||||
mocks.database.tryLock.mockResolvedValueOnce(true);
|
||||
|
||||
mocks.storage.readdir.mockResolvedValue([]);
|
||||
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
|
||||
mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
|
||||
mocks.storage.rename.mockResolvedValue();
|
||||
mocks.storage.unlink.mockResolvedValue();
|
||||
mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
|
||||
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
|
||||
mocks.storage.createGzip.mockReturnValue(new PassThrough());
|
||||
mocks.storage.createGunzip.mockReturnValue(new PassThrough());
|
||||
});
|
||||
|
||||
it('should update maintenance mode state', async () => {
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'filename',
|
||||
});
|
||||
|
||||
expect(mocks.database.tryLock).toHaveBeenCalled();
|
||||
expect(mocks.logger.log).toHaveBeenCalledWith('Running maintenance action restore_database');
|
||||
|
||||
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: true,
|
||||
secret: 'secret',
|
||||
action: {
|
||||
action: 'start',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fail to restore invalid backup', async () => {
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'filename',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
error: 'Error: Invalid backup file format!',
|
||||
task: 'error',
|
||||
});
|
||||
});
|
||||
|
||||
it('should successfully run a backup', async () => {
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'development-filename.sql',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith(
|
||||
'MaintenanceStatusV1',
|
||||
expect.any(String),
|
||||
{
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
task: 'ready',
|
||||
progress: expect.any(Number),
|
||||
},
|
||||
);
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
|
||||
'MaintenanceStatusV1',
|
||||
expect.any(String),
|
||||
{
|
||||
active: true,
|
||||
action: 'end',
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('should fail if backup creation fails', async () => {
|
||||
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
|
||||
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'development-filename.sql',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
error: 'Error: pg_dump non-zero exit code (1)\nerror',
|
||||
task: 'error',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
|
||||
'MaintenanceStatusV1',
|
||||
expect.any(String),
|
||||
expect.objectContaining({
|
||||
task: 'error',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should fail if restore itself fails', async () => {
|
||||
mocks.process.createSpawnDuplexStream
|
||||
.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''))
|
||||
.mockReturnValueOnce(mockDuplex('gzip', 0, 'data', ''))
|
||||
.mockReturnValueOnce(mockDuplex('psql', 1, '', 'error'));
|
||||
|
||||
await sut.runAction({
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
restoreBackupFilename: 'development-filename.sql',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
error: 'Error: psql non-zero exit code (1)\nerror',
|
||||
task: 'error',
|
||||
});
|
||||
|
||||
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
|
||||
'MaintenanceStatusV1',
|
||||
expect.any(String),
|
||||
expect.objectContaining({
|
||||
task: 'error',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* Backups
|
||||
*/
|
||||
|
||||
describe('listBackups', () => {
|
||||
it('should give us all backups', async () => {
|
||||
mocks.storage.readdir.mockResolvedValue([
|
||||
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
|
||||
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
|
||||
'immich-db-backup-1753789649000.sql.gz',
|
||||
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
|
||||
]);
|
||||
|
||||
await expect(sut.listBackups()).resolves.toMatchObject({
|
||||
backups: [
|
||||
'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz',
|
||||
'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz',
|
||||
'immich-db-backup-1753789649000.sql.gz',
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteBackup', () => {
|
||||
it('should reject invalid file names', async () => {
|
||||
await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
|
||||
new BadRequestException('Invalid backup name!'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should unlink the target file', async () => {
|
||||
await sut.deleteBackup(['filename.sql']);
|
||||
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.storage.unlink).toHaveBeenCalledWith(
|
||||
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('uploadBackup', () => {
|
||||
it('should reject invalid file names', async () => {
|
||||
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
|
||||
new BadRequestException('Invalid backup name!'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should write file', async () => {
|
||||
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
|
||||
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
|
||||
});
|
||||
});
|
||||
|
||||
describe('downloadBackup', () => {
|
||||
it('should reject invalid file names', () => {
|
||||
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
|
||||
});
|
||||
|
||||
it('should get backup path', () => {
|
||||
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
|
||||
expect.objectContaining({
|
||||
path: '/data/backups/hello.sql.gz',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,38 +4,19 @@ import { NextFunction, Request, Response } from 'express';
|
||||
import { jwtVerify } from 'jose';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { IncomingHttpHeaders } from 'node:http';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import {
|
||||
MaintenanceAuthDto,
|
||||
MaintenanceDetectInstallResponseDto,
|
||||
MaintenanceStatusResponseDto,
|
||||
SetMaintenanceModeDto,
|
||||
} from 'src/dtos/maintenance.dto';
|
||||
import { ServerConfigDto } from 'src/dtos/server.dto';
|
||||
import { DatabaseLock, ImmichCookie, MaintenanceAction, SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
|
||||
import { ImmichCookie, SystemMetadataKey } from 'src/enum';
|
||||
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
|
||||
import { AppRepository } from 'src/repositories/app.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { type ApiService as _ApiService } from 'src/services/api.service';
|
||||
import { type BaseService as _BaseService } from 'src/services/base.service';
|
||||
import { type DatabaseBackupService as _DatabaseBackupService } from 'src/services/database-backup.service';
|
||||
import { type ServerService as _ServerService } from 'src/services/server.service';
|
||||
import { MaintenanceModeState } from 'src/types';
|
||||
import { getConfig } from 'src/utils/config';
|
||||
import {
|
||||
deleteDatabaseBackup,
|
||||
downloadDatabaseBackup,
|
||||
listDatabaseBackups,
|
||||
restoreDatabaseBackup,
|
||||
uploadDatabaseBackup,
|
||||
} from 'src/utils/database-backups';
|
||||
import { ImmichFileResponse } from 'src/utils/file';
|
||||
import { createMaintenanceLoginUrl, detectPriorInstall } from 'src/utils/maintenance';
|
||||
import { createMaintenanceLoginUrl } from 'src/utils/maintenance';
|
||||
import { getExternalDomain } from 'src/utils/misc';
|
||||
|
||||
/**
|
||||
@@ -43,50 +24,16 @@ import { getExternalDomain } from 'src/utils/misc';
|
||||
*/
|
||||
@Injectable()
|
||||
export class MaintenanceWorkerService {
|
||||
#secret: string = null!;
|
||||
#status: MaintenanceStatusResponseDto = {
|
||||
active: true,
|
||||
action: MaintenanceAction.Start,
|
||||
};
|
||||
|
||||
constructor(
|
||||
protected logger: LoggingRepository,
|
||||
private appRepository: AppRepository,
|
||||
private configRepository: ConfigRepository,
|
||||
private systemMetadataRepository: SystemMetadataRepository,
|
||||
private maintenanceWebsocketRepository: MaintenanceWebsocketRepository,
|
||||
private storageRepository: StorageRepository,
|
||||
private processRepository: ProcessRepository,
|
||||
private databaseRepository: DatabaseRepository,
|
||||
private maintenanceWorkerRepository: MaintenanceWebsocketRepository,
|
||||
) {
|
||||
this.logger.setContext(this.constructor.name);
|
||||
}
|
||||
|
||||
mock(status: MaintenanceStatusResponseDto) {
|
||||
this.#secret = 'secret';
|
||||
this.#status = status;
|
||||
}
|
||||
|
||||
async init() {
|
||||
const state = (await this.systemMetadataRepository.get(
|
||||
SystemMetadataKey.MaintenanceMode,
|
||||
)) as MaintenanceModeState & { isMaintenanceMode: true };
|
||||
|
||||
this.#secret = state.secret;
|
||||
this.#status = {
|
||||
active: true,
|
||||
action: state.action.action,
|
||||
};
|
||||
|
||||
StorageCore.setMediaLocation(this.detectMediaLocation());
|
||||
|
||||
this.maintenanceWebsocketRepository.setAuthFn(async (client) => this.authenticate(client.request.headers));
|
||||
this.maintenanceWebsocketRepository.setStatusUpdateFn((status) => (this.#status = status));
|
||||
|
||||
await this.logSecret();
|
||||
void this.runAction(state.action);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _BaseService.configRepos}
|
||||
*/
|
||||
@@ -108,10 +55,22 @@ export class MaintenanceWorkerService {
|
||||
/**
|
||||
* {@link _ServerService.getSystemConfig}
|
||||
*/
|
||||
getSystemConfig() {
|
||||
async getSystemConfig() {
|
||||
const config = await this.getConfig({ withCache: false });
|
||||
|
||||
return {
|
||||
loginPageMessage: config.server.loginPageMessage,
|
||||
trashDays: config.trash.days,
|
||||
userDeleteDelay: config.user.deleteDelay,
|
||||
oauthButtonText: config.oauth.buttonText,
|
||||
isInitialized: true,
|
||||
isOnboarded: true,
|
||||
externalDomain: config.server.externalDomain,
|
||||
publicUsers: config.server.publicUsers,
|
||||
mapDarkStyleUrl: config.map.darkStyle,
|
||||
mapLightStyleUrl: config.map.lightStyle,
|
||||
maintenanceMode: true,
|
||||
} as ServerConfigDto;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -147,89 +106,12 @@ export class MaintenanceWorkerService {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _StorageService.detectMediaLocation}
|
||||
*/
|
||||
detectMediaLocation(): string {
|
||||
const envData = this.configRepository.getEnv();
|
||||
if (envData.storage.mediaLocation) {
|
||||
return envData.storage.mediaLocation;
|
||||
}
|
||||
|
||||
const targets: string[] = [];
|
||||
const candidates = ['/data', '/usr/src/app/upload'];
|
||||
|
||||
for (const candidate of candidates) {
|
||||
const exists = this.storageRepository.existsSync(candidate);
|
||||
if (exists) {
|
||||
targets.push(candidate);
|
||||
}
|
||||
}
|
||||
|
||||
if (targets.length === 1) {
|
||||
return targets[0];
|
||||
}
|
||||
|
||||
return '/usr/src/app/upload';
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupService.listBackups}
|
||||
*/
|
||||
async listBackups(): Promise<{ backups: string[] }> {
|
||||
return { backups: await listDatabaseBackups(this.backupRepos) };
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupService.deleteBackup}
|
||||
*/
|
||||
async deleteBackup(files: string[]): Promise<void> {
|
||||
return deleteDatabaseBackup(this.backupRepos, files);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupService.uploadBackup}
|
||||
*/
|
||||
async uploadBackup(file: Express.Multer.File): Promise<void> {
|
||||
return uploadDatabaseBackup(this.backupRepos, file);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link _DatabaseBackupService.downloadBackup}
|
||||
*/
|
||||
downloadBackup(fileName: string): ImmichFileResponse {
|
||||
return downloadDatabaseBackup(fileName);
|
||||
}
|
||||
|
||||
private get backupRepos() {
|
||||
return {
|
||||
logger: this.logger,
|
||||
storage: this.storageRepository,
|
||||
config: this.configRepository,
|
||||
process: this.processRepository,
|
||||
database: this.databaseRepository,
|
||||
private async secret(): Promise<string> {
|
||||
const state = (await this.systemMetadataRepository.get(SystemMetadataKey.MaintenanceMode)) as {
|
||||
secret: string;
|
||||
};
|
||||
}
|
||||
|
||||
private getStatus(): MaintenanceStatusResponseDto {
|
||||
return this.#status;
|
||||
}
|
||||
|
||||
private getPublicStatus(): MaintenanceStatusResponseDto {
|
||||
const state = structuredClone(this.#status);
|
||||
|
||||
if (state.error) {
|
||||
state.error = 'Something went wrong, see logs!';
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
||||
|
||||
setStatus(status: MaintenanceStatusResponseDto): void {
|
||||
this.#status = status;
|
||||
this.maintenanceWebsocketRepository.serverSend('MaintenanceStatus', status);
|
||||
this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'private', status);
|
||||
this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'public', this.getPublicStatus());
|
||||
return state.secret;
|
||||
}
|
||||
|
||||
async logSecret(): Promise<void> {
|
||||
@@ -241,7 +123,7 @@ export class MaintenanceWorkerService {
|
||||
{
|
||||
username: 'immich-admin',
|
||||
},
|
||||
this.#secret,
|
||||
await this.secret(),
|
||||
);
|
||||
|
||||
this.logger.log(`\n\n🚧 Immich is in maintenance mode, you can log in using the following URL:\n${url}\n`);
|
||||
@@ -252,120 +134,28 @@ export class MaintenanceWorkerService {
|
||||
return this.login(jwtToken);
|
||||
}
|
||||
|
||||
async status(potentiallyJwt?: string): Promise<MaintenanceStatusResponseDto> {
|
||||
try {
|
||||
await this.login(potentiallyJwt);
|
||||
return this.getStatus();
|
||||
} catch {
|
||||
return this.getPublicStatus();
|
||||
}
|
||||
}
|
||||
|
||||
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
|
||||
return detectPriorInstall(this.storageRepository);
|
||||
}
|
||||
|
||||
async login(jwt?: string): Promise<MaintenanceAuthDto> {
|
||||
if (!jwt) {
|
||||
throw new UnauthorizedException('Missing JWT Token');
|
||||
}
|
||||
|
||||
const secret = await this.secret();
|
||||
|
||||
try {
|
||||
const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(this.#secret));
|
||||
const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(secret));
|
||||
return result.payload;
|
||||
} catch {
|
||||
throw new UnauthorizedException('Invalid JWT Token');
|
||||
}
|
||||
}
|
||||
|
||||
async setAction(action: SetMaintenanceModeDto) {
|
||||
this.setStatus({
|
||||
active: true,
|
||||
action: action.action,
|
||||
});
|
||||
|
||||
await this.runAction(action);
|
||||
}
|
||||
|
||||
async runAction(action: SetMaintenanceModeDto) {
|
||||
switch (action.action) {
|
||||
case MaintenanceAction.Start: {
|
||||
return;
|
||||
}
|
||||
case MaintenanceAction.End: {
|
||||
return this.endMaintenance();
|
||||
}
|
||||
case MaintenanceAction.RestoreDatabase: {
|
||||
if (!action.restoreBackupFilename) {
|
||||
return;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const lock = await this.databaseRepository.tryLock(DatabaseLock.MaintenanceOperation);
|
||||
if (!lock) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.log(`Running maintenance action ${action.action}`);
|
||||
|
||||
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
|
||||
isMaintenanceMode: true,
|
||||
secret: this.#secret,
|
||||
action: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
switch (action.action) {
|
||||
case MaintenanceAction.RestoreDatabase: {
|
||||
await this.restoreBackup(action.restoreBackupFilename);
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error(`Encountered error running action: ${error}`);
|
||||
this.setStatus({
|
||||
active: true,
|
||||
action: action.action,
|
||||
task: 'error',
|
||||
error: '' + error,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private async restoreBackup(filename: string): Promise<void> {
|
||||
this.setStatus({
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
task: 'ready',
|
||||
progress: 0,
|
||||
});
|
||||
|
||||
await restoreDatabaseBackup(this.backupRepos, filename, (task, progress) =>
|
||||
this.setStatus({
|
||||
active: true,
|
||||
action: MaintenanceAction.RestoreDatabase,
|
||||
progress,
|
||||
task,
|
||||
}),
|
||||
);
|
||||
|
||||
await this.setAction({
|
||||
action: MaintenanceAction.End,
|
||||
});
|
||||
}
|
||||
|
||||
private async endMaintenance(): Promise<void> {
|
||||
async endMaintenance(): Promise<void> {
|
||||
const state: MaintenanceModeState = { isMaintenanceMode: false as const };
|
||||
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, state);
|
||||
|
||||
// => corresponds to notification.service.ts#onAppRestart
|
||||
this.maintenanceWebsocketRepository.clientBroadcast('AppRestartV1', state);
|
||||
this.maintenanceWebsocketRepository.serverSend('AppRestart', state);
|
||||
this.maintenanceWorkerRepository.clientBroadcast('AppRestartV1', state);
|
||||
this.maintenanceWorkerRepository.serverSend('AppRestart', state);
|
||||
this.appRepository.exitApp();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,37 +1,17 @@
import { PluginContext, PluginTriggerType } from 'src/enum';
import { JSONSchema } from 'src/types/plugin-schema.types';

export type PluginTrigger = {
name: string;
type: PluginTriggerType;
description: string;
context: PluginContext;
schema: JSONSchema | null;
contextType: PluginContext;
};

export const pluginTriggers: PluginTrigger[] = [
{
name: 'Asset Uploaded',
type: PluginTriggerType.AssetCreate,
description: 'Triggered when a new asset is uploaded',
context: PluginContext.Asset,
schema: {
type: 'object',
properties: {
assetType: {
type: 'string',
description: 'Type of the asset',
default: 'ALL',
enum: ['Image', 'Video', 'All'],
},
},
},
contextType: PluginContext.Asset,
},
{
name: 'Person Recognized',
type: PluginTriggerType.PersonRecognized,
description: 'Triggered when a person is detected in an asset',
context: PluginContext.Person,
schema: null,
contextType: PluginContext.Person,
},
];

@@ -369,6 +369,7 @@ select
"asset"."livePhotoVideoId",
"asset"."encodedVideoPath",
"asset"."originalPath",
"asset"."isOffline",
to_json("asset_exif") as "exifInfo",
(
select

@@ -7,6 +8,8 @@ from
"workflow"
where
"id" = $1
order by
"createdAt" desc

-- WorkflowRepository.getWorkflowsByOwner
select
@@ -16,7 +18,7 @@ from
where
"ownerId" = $1
order by
"name"
"createdAt" desc

-- WorkflowRepository.getWorkflowsByTrigger
select

@@ -232,6 +232,7 @@ export class AssetJobRepository {
'asset.livePhotoVideoId',
'asset.encodedVideoPath',
'asset.originalPath',
'asset.isOffline',
])
.$call(withExif)
.select(withFacesAndPeople)

@@ -1,85 +0,0 @@
|
||||
import { ChildProcessWithoutNullStreams } from 'node:child_process';
|
||||
import { Readable, Writable } from 'node:stream';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
|
||||
function* data() {
|
||||
yield 'Hello, world!';
|
||||
}
|
||||
|
||||
describe(ProcessRepository.name, () => {
|
||||
let sut: ProcessRepository;
|
||||
let sink: Writable;
|
||||
|
||||
beforeAll(() => {
|
||||
sut = new ProcessRepository();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
sink = new Writable({
|
||||
write(_chunk, _encoding, callback) {
|
||||
callback();
|
||||
},
|
||||
|
||||
final(callback) {
|
||||
callback();
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
describe('createSpawnDuplexStream', () => {
|
||||
it('should work (drain to stdout)', async () => {
|
||||
const process = sut.createSpawnDuplexStream('bash', ['-c', 'exit 0']);
|
||||
await pipeline(process, sink);
|
||||
});
|
||||
|
||||
it('should throw on non-zero exit code', async () => {
|
||||
const process = sut.createSpawnDuplexStream('bash', ['-c', 'echo "error message" >&2; exit 1']);
|
||||
await expect(pipeline(process, sink)).rejects.toThrowErrorMatchingInlineSnapshot(`
|
||||
[Error: bash non-zero exit code (1)
|
||||
error message
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('should accept stdin / output stdout', async () => {
|
||||
let output = '';
|
||||
const sink = new Writable({
|
||||
write(chunk, _encoding, callback) {
|
||||
output += chunk;
|
||||
callback();
|
||||
},
|
||||
|
||||
final(callback) {
|
||||
callback();
|
||||
},
|
||||
});
|
||||
|
||||
const echoProcess = sut.createSpawnDuplexStream('cat');
|
||||
await pipeline(Readable.from(data()), echoProcess, sink);
|
||||
expect(output).toBe('Hello, world!');
|
||||
});
|
||||
|
||||
it('should drain stdin on process exit', async () => {
|
||||
let resolve1: () => void;
|
||||
let resolve2: () => void;
|
||||
const promise1 = new Promise<void>((r) => (resolve1 = r));
|
||||
const promise2 = new Promise<void>((r) => (resolve2 = r));
|
||||
|
||||
async function* data() {
|
||||
yield 'Hello, world!';
|
||||
await promise1;
|
||||
await promise2;
|
||||
yield 'Write after stdin close / process exit!';
|
||||
}
|
||||
|
||||
const process = sut.createSpawnDuplexStream('bash', ['-c', 'exit 0']);
|
||||
|
||||
const realProcess = (process as never as { _process: ChildProcessWithoutNullStreams })._process;
|
||||
realProcess.on('close', () => setImmediate(() => resolve1()));
|
||||
realProcess.stdin.on('close', () => setImmediate(() => resolve2()));
|
||||
|
||||
await pipeline(Readable.from(data()), process);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,109 +1,9 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { ChildProcessWithoutNullStreams, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
|
||||
import { Duplex } from 'node:stream';
|
||||
|
||||
@Injectable()
|
||||
export class ProcessRepository {
|
||||
spawn(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
|
||||
spawn(command: string, args: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
|
||||
return spawn(command, args, options);
|
||||
}
|
||||
|
||||
createSpawnDuplexStream(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): Duplex {
|
||||
let stdinClosed = false;
|
||||
let drainCallback: undefined | (() => void);
|
||||
|
||||
const process = this.spawn(command, args, options);
|
||||
const duplex = new Duplex({
|
||||
// duplex -> stdin
|
||||
write(chunk, encoding, callback) {
|
||||
// drain the input if process dies
|
||||
if (stdinClosed) {
|
||||
return callback();
|
||||
}
|
||||
|
||||
// handle stream backpressure
|
||||
if (process.stdin.write(chunk, encoding)) {
|
||||
callback();
|
||||
} else {
|
||||
drainCallback = callback;
|
||||
process.stdin.once('drain', () => {
|
||||
drainCallback = undefined;
|
||||
callback();
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
read() {
|
||||
// no-op
|
||||
},
|
||||
|
||||
final(callback) {
|
||||
if (stdinClosed) {
|
||||
callback();
|
||||
} else {
|
||||
process.stdin.end(callback);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
// stdout -> duplex
|
||||
process.stdout.on('data', (chunk) => {
|
||||
// handle stream backpressure
|
||||
if (!duplex.push(chunk)) {
|
||||
process.stdout.pause();
|
||||
}
|
||||
});
|
||||
|
||||
duplex.on('resume', () => process.stdout.resume());
|
||||
|
||||
// end handling
|
||||
let stdoutClosed = false;
|
||||
function close(error?: Error) {
|
||||
stdinClosed = true;
|
||||
|
||||
if (error) {
|
||||
duplex.destroy(error);
|
||||
} else if (stdoutClosed && typeof process.exitCode === 'number') {
|
||||
duplex.push(null);
|
||||
}
|
||||
}
|
||||
|
||||
process.stdout.on('close', () => {
|
||||
stdoutClosed = true;
|
||||
close();
|
||||
});
|
||||
|
||||
// error handling
|
||||
process.on('error', close);
|
||||
process.stdout.on('error', close);
|
||||
process.stdin.on('error', (error) => {
|
||||
if ((error as { code?: 'EPIPE' })?.code === 'EPIPE') {
|
||||
try {
|
||||
drainCallback!();
|
||||
} catch (error) {
|
||||
close(error as Error);
|
||||
}
|
||||
} else {
|
||||
close(error);
|
||||
}
|
||||
});
|
||||
|
||||
let stderr = '';
|
||||
process.stderr.on('data', (chunk) => (stderr += chunk));
|
||||
|
||||
process.on('exit', (code) => {
|
||||
console.info(`${command} exited (${code})`);
|
||||
|
||||
if (code === 0) {
|
||||
close();
|
||||
} else {
|
||||
close(new Error(`${command} non-zero exit code (${code})\n${stderr}`));
|
||||
}
|
||||
});
|
||||
|
||||
// attach _process to Duplex for testing suite
|
||||
(duplex as never as { _process: ChildProcessWithoutNullStreams })._process = process;
|
||||
|
||||
return duplex;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,8 +5,7 @@ import { escapePath, glob, globStream } from 'fast-glob';
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync, ReadOptionsWithBuffer } from 'node:fs';
import fs from 'node:fs/promises';
import path from 'node:path';
import { PassThrough, Readable, Writable } from 'node:stream';
import { createGunzip, createGzip } from 'node:zlib';
import { Readable, Writable } from 'node:stream';
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { mimeTypes } from 'src/utils/mime-types';
@@ -94,18 +93,6 @@ export class StorageRepository {
return { stream: archive, addFile, finalize };
}

createGzip(): PassThrough {
return createGzip();
}

createGunzip(): PassThrough {
return createGunzip();
}

createPlainReadStream(filepath: string): Readable {
return createReadStream(filepath);
}

async createReadStream(filepath: string, mimeType?: string | null): Promise<ImmichReadStream> {
const { size } = await fs.stat(filepath);
await fs.access(filepath, constants.R_OK);

@@ -12,12 +12,22 @@ export class WorkflowRepository {

@GenerateSql({ params: [DummyValue.UUID] })
getWorkflow(id: string) {
return this.db.selectFrom('workflow').selectAll().where('id', '=', id).executeTakeFirst();
return this.db
.selectFrom('workflow')
.selectAll()
.where('id', '=', id)
.orderBy('createdAt', 'desc')
.executeTakeFirst();
}

@GenerateSql({ params: [DummyValue.UUID] })
getWorkflowsByOwner(ownerId: string) {
return this.db.selectFrom('workflow').selectAll().where('ownerId', '=', ownerId).orderBy('name').execute();
return this.db
.selectFrom('workflow')
.selectAll()
.where('ownerId', '=', ownerId)
.orderBy('createdAt', 'desc')
.execute();
}

@GenerateSql({ params: [PluginTriggerType.AssetCreate] })

@@ -585,8 +585,6 @@ describe(AssetService.name, () => {
'/uploads/user-id/webp/path.ext',
'/uploads/user-id/thumbs/path.jpg',
'/uploads/user-id/fullsize/path.webp',
assetWithFace.encodedVideoPath, // this value is null
undefined, // no sidecar path
assetWithFace.originalPath,
],
},
@@ -648,8 +646,6 @@ describe(AssetService.name, () => {
'/uploads/user-id/webp/path.ext',
'/uploads/user-id/thumbs/path.jpg',
'/uploads/user-id/fullsize/path.webp',
undefined,
undefined,
'fake_path/asset_1.jpeg',
],
},
@@ -676,8 +672,6 @@ describe(AssetService.name, () => {
'/uploads/user-id/webp/path.ext',
'/uploads/user-id/thumbs/path.jpg',
'/uploads/user-id/fullsize/path.webp',
undefined,
undefined,
'fake_path/asset_1.jpeg',
],
},

@@ -363,11 +363,11 @@ export class AssetService extends BaseService {
const { fullsizeFile, previewFile, thumbnailFile, sidecarFile } = getAssetFiles(asset.files ?? []);
const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];

if (deleteOnDisk) {
if (deleteOnDisk && !asset.isOffline) {
files.push(sidecarFile?.path, asset.originalPath);
}

await this.jobRepository.queue({ name: JobName.FileDelete, data: { files } });
await this.jobRepository.queue({ name: JobName.FileDelete, data: { files: files.filter(Boolean) } });

return JobStatus.Success;
}

@@ -5,7 +5,7 @@ import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
import { BackupService } from 'src/services/backup.service';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { mockDuplex, mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { describe } from 'vitest';

describe(BackupService.name, () => {
@@ -147,7 +147,6 @@ describe(BackupService.name, () => {
beforeEach(() => {
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
@@ -166,7 +165,7 @@ describe(BackupService.name, () => {
({ sut, mocks } = newTestService(BackupService, { config: configMock }));

mocks.storage.readdir.mockResolvedValue([]);
mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
@@ -175,16 +174,14 @@ describe(BackupService.name, () => {

await sut.handleBackupDatabase();

expect(mocks.process.createSpawnDuplexStream).toHaveBeenCalled();
const call = mocks.process.createSpawnDuplexStream.mock.calls[0];
expect(mocks.process.spawn).toHaveBeenCalled();
const call = mocks.process.spawn.mock.calls[0];
const args = call[1] as string[];
expect(args).toMatchInlineSnapshot(`
[
"postgresql://postgres:pwd@host:5432/immich?sslmode=require",
"--clean",
"--if-exists",
]
`);
// ['--dbname', '<url>', '--clean', '--if-exists']
expect(args[0]).toBe('--dbname');
const passedUrl = args[1];
expect(passedUrl).not.toContain('uselibpqcompat');
expect(passedUrl).toContain('sslmode=require');
});

it('should run a database backup successfully', async () => {
@@ -199,21 +196,21 @@ describe(BackupService.name, () => {
expect(mocks.storage.rename).toHaveBeenCalled();
});

it('should fail if pg_dump fails', async () => {
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
it('should fail if pg_dumpall fails', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
});

it('should not rename file if pgdump fails and gzip succeeds', async () => {
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
expect(mocks.storage.rename).not.toHaveBeenCalled();
});

it('should fail if gzip fails', async () => {
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
mocks.process.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Gzip failed with code 1');
});

it('should fail if write stream fails', async () => {
@@ -229,9 +226,9 @@ describe(BackupService.name, () => {
});

it('should ignore unlink failing and still return failed job status', async () => {
mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
mocks.storage.unlink.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
expect(mocks.storage.unlink).toHaveBeenCalled();
});

@@ -245,12 +242,12 @@ describe(BackupService.name, () => {
${'17.15.1'} | ${17}
${'18.0.0'} | ${18}
`(
`should use pg_dump $expectedVersion with postgres version $postgresVersion`,
`should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
async ({ postgresVersion, expectedVersion }) => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
await sut.handleBackupDatabase();
expect(mocks.process.createSpawnDuplexStream).toHaveBeenCalledWith(
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
expect(mocks.process.spawn).toHaveBeenCalledWith(
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dumpall`,
expect.any(Array),
expect.any(Object),
);

@@ -1,16 +1,13 @@
import { Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import path from 'node:path';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import {
createDatabaseBackup,
isFailedDatabaseBackupName,
isValidDatabaseRoutineBackupName,
UnsupportedPostgresError,
} from 'src/utils/database-backups';
import { handlePromiseError } from 'src/utils/misc';

@Injectable()
@@ -56,11 +53,16 @@ export class BackupService extends BaseService {

const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await this.storageRepository.readdir(backupsFolder);
const failedBackups = files.filter((file) => file.match(/immich-db-backup-.*\.sql\.gz\.tmp$/));
const backups = files
.filter((fn) => isValidDatabaseRoutineBackupName(fn))
.filter((file) => {
const oldBackupStyle = file.match(/immich-db-backup-\d+\.sql\.gz$/);
//immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
const newBackupStyle = file.match(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
return oldBackupStyle || newBackupStyle;
})
.toSorted()
.toReversed();
const failedBackups = files.filter((fn) => isFailedDatabaseBackupName(fn));

const toDelete = backups.slice(config.keepLastAmount);
toDelete.push(...failedBackups);
@@ -73,27 +75,123 @@ export class BackupService extends BaseService {

@OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
async handleBackupDatabase(): Promise<JobStatus> {
try {
await createDatabaseBackup(this.backupRepos);
} catch (error) {
if (error instanceof UnsupportedPostgresError) {
return JobStatus.Failed;
}
this.logger.debug(`Database Backup Started`);
const { database } = this.configRepository.getEnv();
const config = database.config;

const isUrlConnection = config.connectionType === 'url';

let connectionUrl: string = isUrlConnection ? config.url : '';
if (URL.canParse(connectionUrl)) {
// remove known bad url parameters for pg_dumpall
const url = new URL(connectionUrl);
url.searchParams.delete('uselibpqcompat');
connectionUrl = url.toString();
}

const databaseParams = isUrlConnection
? ['--dbname', connectionUrl]
: [
'--username',
config.username,
'--host',
config.host,
'--port',
`${config.port}`,
'--database',
config.database,
];

databaseParams.push('--clean', '--if-exists');
const databaseVersion = await this.databaseRepository.getPostgresVersion();
const backupFilePath = path.join(
StorageCore.getBaseFolder(StorageFolder.Backups),
`immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
);
const databaseSemver = semver.coerce(databaseVersion);
const databaseMajorVersion = databaseSemver?.major;

if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
return JobStatus.Failed;
}

this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);

try {
await new Promise<void>((resolve, reject) => {
const pgdump = this.processRepository.spawn(
`/usr/lib/postgresql/${databaseMajorVersion}/bin/pg_dumpall`,
databaseParams,
{
env: {
PATH: process.env.PATH,
PGPASSWORD: isUrlConnection ? new URL(connectionUrl).password : config.password,
},
},
);

// NOTE: `--rsyncable` is only supported in GNU gzip
const gzip = this.processRepository.spawn(`gzip`, ['--rsyncable']);
pgdump.stdout.pipe(gzip.stdin);

const fileStream = this.storageRepository.createWriteStream(backupFilePath);

gzip.stdout.pipe(fileStream);

pgdump.on('error', (err) => {
this.logger.error(`Backup failed with error: ${err}`);
reject(err);
});

gzip.on('error', (err) => {
this.logger.error(`Gzip failed with error: ${err}`);
reject(err);
});

let pgdumpLogs = '';
let gzipLogs = '';

pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
gzip.stderr.on('data', (data) => (gzipLogs += data));

pgdump.on('exit', (code) => {
if (code !== 0) {
this.logger.error(`Backup failed with code ${code}`);
reject(`Backup failed with code ${code}`);
this.logger.error(pgdumpLogs);
return;
}
if (pgdumpLogs) {
this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
}
});

gzip.on('exit', (code) => {
if (code !== 0) {
this.logger.error(`Gzip failed with code ${code}`);
reject(`Gzip failed with code ${code}`);
this.logger.error(gzipLogs);
return;
}
if (pgdump.exitCode !== 0) {
this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
return;
}
resolve();
});
});
await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
} catch (error) {
this.logger.error(`Database Backup Failure: ${error}`);
await this.storageRepository
.unlink(backupFilePath)
.catch((error) => this.logger.error(`Failed to delete failed backup file: ${error}`));
throw error;
}

this.logger.log(`Database Backup Success`);
await this.cleanupDatabaseBackups();
return JobStatus.Success;
}

private get backupRepos() {
return {
logger: this.logger,
storage: this.storageRepository,
config: this.configRepository,
process: this.processRepository,
database: this.databaseRepository,
};
}
}

@@ -1,5 +1,5 @@
import { jwtVerify } from 'jose';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { SystemMetadataKey } from 'src/enum';
import { CliService } from 'src/services/cli.service';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
@@ -94,14 +94,7 @@ describe(CliService.name, () => {
});

it('should disable maintenance mode', async () => {
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
await expect(sut.disableMaintenanceMode()).resolves.toEqual({
alreadyDisabled: false,
});
@@ -114,14 +107,7 @@ describe(CliService.name, () => {

describe('enableMaintenanceMode', () => {
it('should not do anything if in maintenance mode', async () => {
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
await expect(sut.enableMaintenanceMode()).resolves.toEqual(
expect.objectContaining({
alreadyEnabled: true,
@@ -143,22 +129,13 @@ describe(CliService.name, () => {
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret: expect.stringMatching(/^\w{128}$/),
action: {
action: 'start',
},
});
});

const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;

it('should return a valid login URL', async () => {
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });

const result = await sut.enableMaintenanceMode();


@@ -3,7 +3,7 @@ import { isAbsolute } from 'node:path';
import { SALT_ROUNDS } from 'src/constants';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { UserAdminResponseDto, mapUserAdmin } from 'src/dtos/user.dto';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { SystemMetadataKey } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { createMaintenanceLoginUrl, generateMaintenanceSecret, sendOneShotAppRestart } from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';
@@ -87,9 +87,6 @@ export class CliService extends BaseService {
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret,
action: {
action: MaintenanceAction.Start,
},
});

sendOneShotAppRestart({

@@ -1,82 +0,0 @@
import { BadRequestException } from '@nestjs/common';
import { DateTime } from 'luxon';
import { StorageCore } from 'src/cores/storage.core';
import { StorageFolder } from 'src/enum';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { MaintenanceService } from 'src/services/maintenance.service';
import { newTestService, ServiceMocks } from 'test/utils';

describe(MaintenanceService.name, () => {
let sut: DatabaseBackupService;
let mocks: ServiceMocks;

beforeEach(() => {
({ sut, mocks } = newTestService(DatabaseBackupService));
});

it('should work', () => {
expect(sut).toBeDefined();
});

describe('listBackups', () => {
it('should give us all backups', async () => {
mocks.storage.readdir.mockResolvedValue([
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
'immich-db-backup-1753789649000.sql.gz',
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
]);

await expect(sut.listBackups()).resolves.toMatchObject({
backups: [
'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz',
'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz',
'immich-db-backup-1753789649000.sql.gz',
],
});
});
});

describe('deleteBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});

it('should unlink the target file', async () => {
await sut.deleteBackup(['filename.sql']);
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
);
});
});

describe('uploadBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});

it('should write file', async () => {
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
});
});

describe('downloadBackup', () => {
it('should reject invalid file names', () => {
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
});

it('should get backup path', () => {
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
expect.objectContaining({
path: '/data/backups/hello.sql.gz',
}),
);
});
});
});
Some files were not shown because too many files have changed in this diff.