Compare commits

..

8 Commits

Author SHA1 Message Date
Mees Frensel
3644f67f59 use single query param 2026-03-05 11:02:02 +01:00
Mees Frensel
372aad07d5 fix(web): gracefully handle map errors when WebGL is disabled 2026-02-26 17:53:38 +01:00
Mees Frensel
e454c3566b refactor: star rating (#26357)
* refactor: star rating

* transform rating 0 to null in controller dto

* migrate rating 0 to null

* deprecate rating -1

* rating type annotation

* update Rating type
2026-02-26 14:54:20 +01:00
Luis Nachtigall
4c79c3c902 feat(mobile): Prevent premature image cache eviction when higher image loading is enabled (#26208)
* feat(mobile): enhance image loading with error handling and eviction options

* pull request suggestions: remove log and remove useless local implementation
2026-02-25 17:31:37 -05:00
Alex
3bed1b6131 fix: consider DAR when extracting video dimension (#25293)
* feat: consider DAR when extracting video dimension

* move to probe

* comment

---------

Co-authored-by: mertalev <101130780+mertalev@users.noreply.github.com>
2026-02-25 21:58:53 +00:00
Brandon Wees
3c9fb651d0 feat(server): SyncAssetEditV1 (#26446)
* feat: SyncAssetEditV1

* fix: audit table import

* fix: sql tools table fetch

* fix: medium tests (wip)

* fix: circ dependency

* chore: finalize tests

* chore: codegen/lint

* fix: code review
2026-02-25 18:12:41 +00:00
Mees Frensel
55e625a2ac fix(web): error page i18n (#26517) 2026-02-25 18:35:25 +01:00
Daniel Dietzler
ca6c486a80 refactor: person stubs (#26512) 2026-02-25 08:56:00 -05:00
120 changed files with 1737 additions and 6359 deletions

View File

@@ -99,7 +99,7 @@ describe('/admin/maintenance', () => {
},
{
interval: 500,
timeout: 60_000,
timeout: 10_000,
},
)
.toBeTruthy();
@@ -190,7 +190,7 @@ describe('/admin/maintenance', () => {
},
{
interval: 500,
timeout: 60_000,
timeout: 10_000,
},
)
.toBeFalsy();

View File

@@ -1,669 +0,0 @@
import {
AssetMediaResponseDto,
IntegrityReportResponseDto,
LoginResponseDto,
ManualJobName,
QueueCommand,
QueueName,
} from '@immich/sdk';
import { readFile } from 'node:fs/promises';
import { app, testAssetDir, utils } from 'src/utils';
import request from 'supertest';
import { afterEach, beforeAll, describe, expect, it } from 'vitest';
const assetFilepath = `${testAssetDir}/metadata/gps-position/thompson-springs.jpg`;
const asset1Filepath = `${testAssetDir}/albums/nature/el_torcal_rocks.jpg`;
const asset2Filepath = `${testAssetDir}/albums/nature/wood_anemones.jpg`;
describe('/admin/integrity', () => {
let admin: LoginResponseDto;
let asset: AssetMediaResponseDto;
let user1: LoginResponseDto;
let asset1: AssetMediaResponseDto;
let user2: LoginResponseDto;
let asset2: AssetMediaResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
user1 = await utils.userSetup(admin.accessToken, {
email: '1@example.com',
name: '1',
password: '1',
});
user2 = await utils.userSetup(admin.accessToken, {
email: '2@example.com',
name: '2',
password: '2',
});
for (const queue of Object.values(QueueName)) {
if (queue === QueueName.IntegrityCheck) {
continue;
}
await utils.queueCommand(admin.accessToken, queue, {
command: QueueCommand.Pause,
});
}
asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename: 'asset.jpg',
bytes: await readFile(assetFilepath),
},
});
asset1 = await utils.createAsset(user1.accessToken, {
assetData: {
filename: 'asset.jpg',
bytes: await readFile(asset1Filepath),
},
});
asset2 = await utils.createAsset(user2.accessToken, {
assetData: {
filename: 'asset.jpg',
bytes: await readFile(asset2Filepath),
},
});
await utils.mkFolder('/data/bak');
await utils.copyFolder(`/data/upload/${admin.userId}`, `/data/bak/${admin.userId}`);
for (const queue of Object.values(QueueName)) {
if (queue === QueueName.IntegrityCheck) {
continue;
}
await utils.queueCommand(admin.accessToken, queue, {
command: QueueCommand.Empty,
});
await utils.queueCommand(admin.accessToken, queue, {
command: QueueCommand.Resume,
});
}
});
afterEach(async () => {
await utils.deleteFolder(`/data/upload/${admin.userId}`);
await utils.copyFolder(`/data/bak/${admin.userId}`, `/data/upload/${admin.userId}`);
});
describe('POST /summary (& jobs)', async () => {
it.sequential('reports no issues', async () => {
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFiles,
});
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityMissingFiles,
});
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityChecksumMismatch,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFilesDeleteAll,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual({
missing_file: 0,
untracked_file: 0,
checksum_mismatch: 0,
});
});
it.sequential('should detect an untracked file (job: check untracked files)', async () => {
await utils.putTextFile('untracked', `/data/upload/${admin.userId}/untracked1.png`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
untracked_file: 1,
}),
);
});
it.sequential('should detect outdated untracked file reports (job: refresh untracked files)', async () => {
// these should not be detected:
await utils.putTextFile('untracked', `/data/upload/${admin.userId}/untracked2.png`);
await utils.putTextFile('untracked', `/data/upload/${admin.userId}/untracked3.png`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFilesRefresh,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
untracked_file: 0,
}),
);
});
it.sequential('should delete untracked files (job: delete all untracked file reports)', async () => {
await utils.putTextFile('untracked', `/data/upload/${admin.userId}/untracked1.png`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFilesDeleteAll,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
untracked_file: 0,
}),
);
});
it.sequential('should detect a missing file and not a checksum mismatch (job: check missing files)', async () => {
await utils.deleteFolder(`/data/upload/${admin.userId}`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityMissingFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
missing_file: 1,
checksum_mismatch: 0,
}),
);
});
it.sequential('should detect outdated missing file reports (job: refresh missing files)', async () => {
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityMissingFilesRefresh,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
missing_file: 0,
checksum_mismatch: 0,
}),
);
});
it.sequential('should delete assets with missing files (job: delete all missing file reports)', async () => {
await utils.deleteFolder(`/data/upload/${user1.userId}`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityMissingFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status: listStatus, body: listBody } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(listStatus).toBe(200);
expect(listBody).toEqual(
expect.objectContaining({
missing_file: 1,
}),
);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityMissingFilesDeleteAll,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
missing_file: 0,
}),
);
await expect(utils.getAssetInfo(user1.accessToken, asset1.id)).resolves.toEqual(
expect.objectContaining({
isTrashed: true,
}),
);
});
it.sequential('should detect a checksum mismatch (job: check file checksums)', async () => {
await utils.truncateFolder(`/data/upload/${admin.userId}`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityChecksumMismatch,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
checksum_mismatch: 1,
}),
);
});
it.sequential('should detect outdated checksum mismatch reports (job: refresh file checksums)', async () => {
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityChecksumMismatchRefresh,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
checksum_mismatch: 0,
}),
);
});
it.sequential(
'should delete assets with mismatched checksum (job: delete all checksum mismatch reports)',
async () => {
await utils.truncateFolder(`/data/upload/${user2.userId}`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityChecksumMismatch,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status: listStatus, body: listBody } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(listStatus).toBe(200);
expect(listBody).toEqual(
expect.objectContaining({
checksum_mismatch: 1,
}),
);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityChecksumMismatchDeleteAll,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/summary')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
checksum_mismatch: 0,
}),
);
await expect(utils.getAssetInfo(user2.accessToken, asset2.id)).resolves.toEqual(
expect.objectContaining({
isTrashed: true,
}),
);
},
);
});
describe('POST /report', async () => {
it.sequential('reports untracked files', async () => {
await utils.putTextFile('untracked', `/data/upload/${admin.userId}/untracked1.png`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/report?type=untracked_file')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual({
nextCursor: undefined,
items: expect.arrayContaining([
{
id: expect.any(String),
type: 'untracked_file',
path: `/data/upload/${admin.userId}/untracked1.png`,
assetId: null,
fileAssetId: null,
createdAt: expect.any(String),
},
]),
});
});
it.sequential('reports missing files', async () => {
await utils.deleteFolder(`/data/upload/${admin.userId}`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityMissingFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/report?type=missing_file')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual({
nextCursor: undefined,
items: expect.arrayContaining([
{
id: expect.any(String),
type: 'missing_file',
path: expect.any(String),
assetId: asset.id,
fileAssetId: null,
createdAt: expect.any(String),
},
]),
});
});
it.sequential('reports checksum mismatched files', async () => {
await utils.truncateFolder(`/data/upload/${admin.userId}`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityChecksumMismatch,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, body } = await request(app)
.get('/admin/integrity/report?type=checksum_mismatch')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(body).toEqual({
nextCursor: undefined,
items: expect.arrayContaining([
{
id: expect.any(String),
type: 'checksum_mismatch',
path: expect.any(String),
assetId: asset.id,
fileAssetId: null,
createdAt: expect.any(String),
},
]),
});
});
});
describe('DELETE /report/:id', async () => {
it.sequential('delete untracked files', async () => {
await utils.putTextFile('untracked', `/data/upload/${admin.userId}/untracked1.png`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status: listStatus, body: listBody } = await request(app)
.get('/admin/integrity/report?type=untracked_file')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(listStatus).toBe(200);
const report = (listBody as IntegrityReportResponseDto).items.find(
(item) => item.path === `/data/upload/${admin.userId}/untracked1.png`,
)!;
const { status } = await request(app)
.delete(`/admin/integrity/report/${report.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status: listStatus2, body: listBody2 } = await request(app)
.get('/admin/integrity/report?type=untracked_file')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(listStatus2).toBe(200);
expect(listBody2).not.toBe(
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
id: report.id,
}),
]),
}),
);
});
it.sequential('delete assets missing files', async () => {
await utils.deleteFolder(`/data/upload/${admin.userId}`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityMissingFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status: listStatus, body: listBody } = await request(app)
.get('/admin/integrity/report?type=missing_file')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(listStatus).toBe(200);
expect(listBody.items.length).toBe(1);
const report = (listBody as IntegrityReportResponseDto).items[0];
const { status } = await request(app)
.delete(`/admin/integrity/report/${report.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityMissingFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status: listStatus2, body: listBody2 } = await request(app)
.get('/admin/integrity/report?type=missing_file')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(listStatus2).toBe(200);
expect(listBody2.items.length).toBe(0);
});
it.sequential('delete assets with failing checksum', async () => {
await utils.truncateFolder(`/data/upload/${admin.userId}`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityChecksumMismatch,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status: listStatus, body: listBody } = await request(app)
.get('/admin/integrity/report?type=checksum_mismatch')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(listStatus).toBe(200);
expect(listBody.items.length).toBe(1);
const report = (listBody as IntegrityReportResponseDto).items[0];
const { status } = await request(app)
.delete(`/admin/integrity/report/${report.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityChecksumMismatch,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status: listStatus2, body: listBody2 } = await request(app)
.get('/admin/integrity/report?type=checksum_mismatch')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(listStatus2).toBe(200);
expect(listBody2.items.length).toBe(0);
});
});
describe('GET /report/:type/csv', () => {
it.sequential('exports untracked files as csv', async () => {
await utils.putTextFile('untracked', `/data/upload/${admin.userId}/untracked1.png`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { status, headers, text } = await request(app)
.get('/admin/integrity/report/untracked_file/csv')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(200);
expect(headers['content-type']).toContain('text/csv');
expect(headers['content-disposition']).toContain('.csv');
expect(text).toContain('id,type,assetId,fileAssetId,path');
expect(text).toContain(`untracked_file`);
expect(text).toContain(`/data/upload/${admin.userId}/untracked1.png`);
});
});
describe('GET /report/:id/file', () => {
it.sequential('downloads untracked file', async () => {
await utils.putTextFile('untracked-content', `/data/upload/${admin.userId}/untracked1.png`);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
const { body: listBody } = await request(app)
.get('/admin/integrity/report?type=untracked_file')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
const report = (listBody as IntegrityReportResponseDto).items.find(
(item) => item.path === `/data/upload/${admin.userId}/untracked1.png`,
)!;
const { status, headers, body } = await request(app)
.get(`/admin/integrity/report/${report.id}/file`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.buffer(true)
.send();
expect(status).toBe(200);
expect(headers['content-type']).toContain('application/octet-stream');
expect(body.toString()).toBe('untracked-content');
});
});
});

View File

@@ -1,41 +0,0 @@
import { LoginResponseDto, ManualJobName, QueueName } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import { utils } from 'src/utils';
test.describe.configure({ mode: 'serial' });
test.describe('Integrity', () => {
let admin: LoginResponseDto;
test.beforeAll(async () => {
utils.initSdk();
await utils.resetDatabase();
admin = await utils.adminSetup();
});
test('run integrity jobs to update stats', async ({ context, page }) => {
await utils.setAuthCookies(context, admin.accessToken);
await utils.createJob(admin.accessToken, {
name: ManualJobName.IntegrityUntrackedFiles,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.IntegrityCheck);
await page.goto('/admin/maintenance');
const count = page.getByText('Untracked Files').locator('..').locator('..').locator('div').nth(1);
const previousCount = Number.parseInt((await count.textContent()) ?? '');
await utils.mkFolder(`/data/upload/${admin.userId}`);
await utils.putTextFile('untracked', `/data/upload/${admin.userId}/untracked1.png`);
const checkButton = page.getByText('Integrity Report').locator('..').getByRole('button', { name: 'Check All' });
await checkButton.click();
await expect(checkButton).toBeEnabled();
await expect(count).toContainText((previousCount + 1).toString());
});
});

View File

@@ -195,7 +195,6 @@ export const utils = {
'user',
'system_metadata',
'tag',
'integrity_report',
];
const sql: string[] = [];
@@ -587,54 +586,10 @@ export const utils = {
mkdirSync(`${testAssetDir}/temp`, { recursive: true });
},
putFile(source: string, dest: string) {
return executeCommand('docker', ['cp', source, `immich-e2e-server:${dest}`]).promise;
},
async putTextFile(contents: string, dest: string) {
const dir = await mkdtemp(join(tmpdir(), 'test-'));
const fn = join(dir, 'file');
await pipeline(Readable.from(contents), createWriteStream(fn));
return executeCommand('docker', ['cp', fn, `immich-e2e-server:${dest}`]).promise;
},
async move(source: string, dest: string) {
return executeCommand('docker', ['exec', 'immich-e2e-server', 'mv', source, dest]).promise;
},
async copyFolder(source: string, dest: string) {
return executeCommand('docker', ['exec', 'immich-e2e-server', 'cp', '-r', source, dest]).promise;
},
async deleteFile(path: string) {
return executeCommand('docker', ['exec', 'immich-e2e-server', 'rm', path]).promise;
},
async deleteFolder(path: string) {
return executeCommand('docker', ['exec', 'immich-e2e-server', 'rm', '-r', path]).promise;
},
async truncateFolder(path: string) {
return executeCommand('docker', [
'exec',
'immich-e2e-server',
'find',
path,
'-type',
'f',
'-exec',
'truncate',
'-s',
'1',
'{}',
';',
]).promise;
},
async mkFolder(path: string) {
return executeCommand('docker', ['exec', 'immich-e2e-server', 'mkdir', '-p', path]).promise;
},
createBackup: async (accessToken: string) => {
await utils.createJob(accessToken, {
name: ManualJobName.BackupDatabase,
@@ -649,8 +604,10 @@ export const utils = {
resetBackups: async (accessToken: string) => {
const { backups } = await listDatabaseBackups({ headers: asBearerAuth(accessToken) });
const backupFiles = backups.map((b) => b.filename);
await deleteDatabaseBackup(
{ databaseBackupDeleteDto: { backups: backups.map((dto) => dto.filename) } },
{ databaseBackupDeleteDto: { backups: backupFiles } },
{ headers: asBearerAuth(accessToken) },
);
},

View File

@@ -81,7 +81,6 @@
"cron_expression_description": "Set the scanning interval using the cron format. For more information please refer to e.g. <link>Crontab Guru</link>",
"cron_expression_presets": "Cron expression presets",
"disable_login": "Disable login",
"download_csv": "Download CSV",
"duplicate_detection_job_description": "Run machine learning on assets to detect similar images. Relies on Smart Search",
"exclusion_pattern_description": "Exclusion patterns lets you ignore files and folders when scanning your library. This is useful if you have folders that contain files you don't want to import, such as RAW files.",
"export_config_as_json_description": "Download the current system config as a JSON file",
@@ -194,17 +193,6 @@
"maintenance_delete_backup": "Delete Backup",
"maintenance_delete_backup_description": "This file will be irrevocably deleted.",
"maintenance_delete_error": "Failed to delete backup.",
"maintenance_integrity_check_all": "Check All",
"maintenance_integrity_checksum_mismatch": "Checksum Mismatch",
"maintenance_integrity_checksum_mismatch_job": "Check for checksum mismatches",
"maintenance_integrity_checksum_mismatch_refresh_job": "Refresh checksum mismatch reports",
"maintenance_integrity_missing_file": "Missing Files",
"maintenance_integrity_missing_file_job": "Check for missing files",
"maintenance_integrity_missing_file_refresh_job": "Refresh missing file reports",
"maintenance_integrity_report": "Integrity Report",
"maintenance_integrity_untracked_file": "Untracked Files",
"maintenance_integrity_untracked_file_job": "Check for untracked files",
"maintenance_integrity_untracked_file_refresh_job": "Refresh untracked file reports",
"maintenance_restore_backup": "Restore Backup",
"maintenance_restore_backup_description": "Immich will be wiped and restored from the chosen backup. A backup will be created before continuing.",
"maintenance_restore_backup_different_version": "This backup was created with a different version of Immich!",
@@ -1062,6 +1050,7 @@
"cant_get_number_of_comments": "Can't get number of comments",
"cant_search_people": "Can't search people",
"cant_search_places": "Can't search places",
"enable_webgl_for_map": "Enable WebGL to load the map.{isAdmin, select, true { To hide this warning, disable the map feature.} other {}}",
"error_adding_assets_to_album": "Error adding assets to album",
"error_adding_users_to_album": "Error adding users to album",
"error_deleting_shared_user": "Error deleting shared user",
@@ -1086,6 +1075,7 @@
"failed_to_update_notification_status": "Failed to update notification status",
"incorrect_email_or_password": "Incorrect email or password",
"library_folder_already_exists": "This import path already exists.",
"page_not_found": "Page not found :/",
"paths_validation_failed": "{paths, plural, one {# path} other {# paths}} failed validation",
"profile_picture_transparent_pixels": "Profile pictures cannot have transparent pixels. Please zoom in and/or move the image.",
"quota_higher_than_disk_size": "You set a quota higher than the disk size",
@@ -1209,7 +1199,6 @@
"failed": "Failed",
"failed_count": "Failed: {count}",
"failed_to_authenticate": "Failed to authenticate",
"failed_to_delete_file": "Failed to delete file",
"failed_to_load_assets": "Failed to load assets",
"failed_to_load_folder": "Failed to load folder",
"favorite": "Favorite",
@@ -1257,6 +1246,7 @@
"go_back": "Go back",
"go_to_folder": "Go to folder",
"go_to_search": "Go to search",
"go_to_settings": "Go to settings",
"gps": "GPS",
"gps_missing": "No GPS",
"grant_permission": "Grant permission",
@@ -1339,7 +1329,6 @@
"individual_share": "Individual share",
"individual_shares": "Individual shares",
"info": "Info",
"integrity_checks": "Integrity Checks",
"interval": {
"day_at_onepm": "Every day at 1pm",
"hours": "Every {hours, plural, one {hour} other {{hours, number} hours}}",
@@ -1406,7 +1395,6 @@
"link_to_oauth": "Link to OAuth",
"linked_oauth_account": "Linked OAuth account",
"list": "List",
"load_more": "Load More",
"loading": "Loading",
"loading_search_results_failed": "Loading search results failed",
"local": "Local",
@@ -1824,9 +1812,8 @@
"rate_asset": "Rate Asset",
"rating": "Star rating",
"rating_clear": "Clear rating",
"rating_count": "{count, plural, one {# star} other {# stars}}",
"rating_count": "{count, plural, =0 {Unrated} one {# star} other {# stars}}",
"rating_description": "Display the EXIF rating in the info panel",
"rating_set": "Rating set to {rating, plural, one {# star} other {# stars}}",
"reaction_options": "Reaction options",
"read_changelog": "Read Changelog",
"readonly_mode_disabled": "Read-only mode disabled",

View File

@@ -48,7 +48,7 @@ mixin CancellableImageProviderMixin<T extends Object> on CancellableImageProvide
return null;
}
Stream<ImageInfo> loadRequest(ImageRequest request, ImageDecoderCallback decode) async* {
Stream<ImageInfo> loadRequest(ImageRequest request, ImageDecoderCallback decode, {bool evictOnError = true}) async* {
if (isCancelled) {
this.request = null;
PaintingBinding.instance.imageCache.evict(this);
@@ -57,14 +57,19 @@ mixin CancellableImageProviderMixin<T extends Object> on CancellableImageProvide
try {
final image = await request.load(decode);
if (image == null || isCancelled) {
if ((image == null && evictOnError) || isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
return;
} else if (image == null) {
return;
}
yield image;
} catch (e) {
PaintingBinding.instance.imageCache.evict(this);
rethrow;
} catch (e, stack) {
if (evictOnError) {
PaintingBinding.instance.imageCache.evict(this);
rethrow;
}
_log.warning('Non-fatal image load error', e, stack);
} finally {
this.request = null;
}

View File

@@ -94,7 +94,6 @@ class LocalFullImageProvider extends CancellableImageProvider<LocalFullImageProv
size: Size(size.width * devicePixelRatio, size.height * devicePixelRatio),
assetType: key.assetType,
);
yield* loadRequest(request, decode);
if (!Store.get(StoreKey.loadOriginal, false)) {

View File

@@ -93,9 +93,10 @@ class RemoteFullImageProvider extends CancellableImageProvider<RemoteFullImagePr
uri: getThumbnailUrlForRemoteId(key.assetId, type: AssetMediaSize.preview, thumbhash: key.thumbhash),
headers: headers,
);
yield* loadRequest(previewRequest, decode);
final loadOriginal = assetType == AssetType.image && AppSetting.get(Setting.loadOriginal);
yield* loadRequest(previewRequest, decode, evictOnError: !loadOriginal);
if (assetType != AssetType.image || !AppSetting.get(Setting.loadOriginal)) {
if (!loadOriginal) {
return;
}

View File

@@ -171,12 +171,7 @@ Class | Method | HTTP request | Description
*LibrariesApi* | [**scanLibrary**](doc//LibrariesApi.md#scanlibrary) | **POST** /libraries/{id}/scan | Scan a library
*LibrariesApi* | [**updateLibrary**](doc//LibrariesApi.md#updatelibrary) | **PUT** /libraries/{id} | Update a library
*LibrariesApi* | [**validate**](doc//LibrariesApi.md#validate) | **POST** /libraries/{id}/validate | Validate library settings
*MaintenanceAdminApi* | [**deleteIntegrityReport**](doc//MaintenanceAdminApi.md#deleteintegrityreport) | **DELETE** /admin/integrity/report/{id} | Delete integrity report item
*MaintenanceAdminApi* | [**detectPriorInstall**](doc//MaintenanceAdminApi.md#detectpriorinstall) | **GET** /admin/maintenance/detect-install | Detect existing install
*MaintenanceAdminApi* | [**getIntegrityReport**](doc//MaintenanceAdminApi.md#getintegrityreport) | **GET** /admin/integrity/report | Get integrity report by type
*MaintenanceAdminApi* | [**getIntegrityReportCsv**](doc//MaintenanceAdminApi.md#getintegrityreportcsv) | **GET** /admin/integrity/report/{type}/csv | Export integrity report by type as CSV
*MaintenanceAdminApi* | [**getIntegrityReportFile**](doc//MaintenanceAdminApi.md#getintegrityreportfile) | **GET** /admin/integrity/report/{id}/file | Download flagged file
*MaintenanceAdminApi* | [**getIntegrityReportSummary**](doc//MaintenanceAdminApi.md#getintegrityreportsummary) | **GET** /admin/integrity/summary | Get integrity report summary
*MaintenanceAdminApi* | [**getMaintenanceStatus**](doc//MaintenanceAdminApi.md#getmaintenancestatus) | **GET** /admin/maintenance/status | Get maintenance mode status
*MaintenanceAdminApi* | [**maintenanceLogin**](doc//MaintenanceAdminApi.md#maintenancelogin) | **POST** /admin/maintenance/login | Log into maintenance mode
*MaintenanceAdminApi* | [**setMaintenanceMode**](doc//MaintenanceAdminApi.md#setmaintenancemode) | **POST** /admin/maintenance | Set maintenance mode
@@ -436,10 +431,6 @@ Class | Method | HTTP request | Description
- [FoldersResponse](doc//FoldersResponse.md)
- [FoldersUpdate](doc//FoldersUpdate.md)
- [ImageFormat](doc//ImageFormat.md)
- [IntegrityReportDto](doc//IntegrityReportDto.md)
- [IntegrityReportResponseDto](doc//IntegrityReportResponseDto.md)
- [IntegrityReportSummaryResponseDto](doc//IntegrityReportSummaryResponseDto.md)
- [IntegrityReportType](doc//IntegrityReportType.md)
- [JobCreateDto](doc//JobCreateDto.md)
- [JobName](doc//JobName.md)
- [JobSettingsDto](doc//JobSettingsDto.md)
@@ -585,6 +576,8 @@ Class | Method | HTTP request | Description
- [SyncAlbumUserV1](doc//SyncAlbumUserV1.md)
- [SyncAlbumV1](doc//SyncAlbumV1.md)
- [SyncAssetDeleteV1](doc//SyncAssetDeleteV1.md)
- [SyncAssetEditDeleteV1](doc//SyncAssetEditDeleteV1.md)
- [SyncAssetEditV1](doc//SyncAssetEditV1.md)
- [SyncAssetExifV1](doc//SyncAssetExifV1.md)
- [SyncAssetFaceDeleteV1](doc//SyncAssetFaceDeleteV1.md)
- [SyncAssetFaceV1](doc//SyncAssetFaceV1.md)
@@ -617,9 +610,6 @@ Class | Method | HTTP request | Description
- [SystemConfigGeneratedFullsizeImageDto](doc//SystemConfigGeneratedFullsizeImageDto.md)
- [SystemConfigGeneratedImageDto](doc//SystemConfigGeneratedImageDto.md)
- [SystemConfigImageDto](doc//SystemConfigImageDto.md)
- [SystemConfigIntegrityChecks](doc//SystemConfigIntegrityChecks.md)
- [SystemConfigIntegrityChecksumJob](doc//SystemConfigIntegrityChecksumJob.md)
- [SystemConfigIntegrityJob](doc//SystemConfigIntegrityJob.md)
- [SystemConfigJobDto](doc//SystemConfigJobDto.md)
- [SystemConfigLibraryDto](doc//SystemConfigLibraryDto.md)
- [SystemConfigLibraryScanDto](doc//SystemConfigLibraryScanDto.md)

View File

@@ -170,10 +170,6 @@ part 'model/facial_recognition_config.dart';
part 'model/folders_response.dart';
part 'model/folders_update.dart';
part 'model/image_format.dart';
part 'model/integrity_report_dto.dart';
part 'model/integrity_report_response_dto.dart';
part 'model/integrity_report_summary_response_dto.dart';
part 'model/integrity_report_type.dart';
part 'model/job_create_dto.dart';
part 'model/job_name.dart';
part 'model/job_settings_dto.dart';
@@ -319,6 +315,8 @@ part 'model/sync_album_user_delete_v1.dart';
part 'model/sync_album_user_v1.dart';
part 'model/sync_album_v1.dart';
part 'model/sync_asset_delete_v1.dart';
part 'model/sync_asset_edit_delete_v1.dart';
part 'model/sync_asset_edit_v1.dart';
part 'model/sync_asset_exif_v1.dart';
part 'model/sync_asset_face_delete_v1.dart';
part 'model/sync_asset_face_v1.dart';
@@ -351,9 +349,6 @@ part 'model/system_config_faces_dto.dart';
part 'model/system_config_generated_fullsize_image_dto.dart';
part 'model/system_config_generated_image_dto.dart';
part 'model/system_config_image_dto.dart';
part 'model/system_config_integrity_checks.dart';
part 'model/system_config_integrity_checksum_job.dart';
part 'model/system_config_integrity_job.dart';
part 'model/system_config_job_dto.dart';
part 'model/system_config_library_dto.dart';
part 'model/system_config_library_scan_dto.dart';

View File

@@ -16,55 +16,6 @@ class MaintenanceAdminApi {
final ApiClient apiClient;
/// Delete integrity report item
///
/// Delete a given report item and perform corresponding deletion (e.g. trash asset, delete file)
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
Future<Response> deleteIntegrityReportWithHttpInfo(String id,) async {
// ignore: prefer_const_declarations
final apiPath = r'/admin/integrity/report/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'DELETE',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Delete integrity report item
///
/// Delete a given report item and perform corresponding deletion (e.g. trash asset, delete file)
///
/// Parameters:
///
/// * [String] id (required):
Future<void> deleteIntegrityReport(String id,) async {
final response = await deleteIntegrityReportWithHttpInfo(id,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// Detect existing install
///
/// Collect integrity checks and other heuristics about local data.
@@ -113,244 +64,6 @@ class MaintenanceAdminApi {
return null;
}
/// Get integrity report by type
///
/// Get all flagged items by integrity report type
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [IntegrityReportType] type (required):
///
/// * [String] cursor:
/// Cursor for pagination
///
/// * [num] limit:
/// Number of items per page
Future<Response> getIntegrityReportWithHttpInfo(IntegrityReportType type, { String? cursor, num? limit, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/admin/integrity/report';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (cursor != null) {
queryParams.addAll(_queryParams('', 'cursor', cursor));
}
if (limit != null) {
queryParams.addAll(_queryParams('', 'limit', limit));
}
queryParams.addAll(_queryParams('', 'type', type));
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Get integrity report by type
///
/// Get all flagged items by integrity report type
///
/// Parameters:
///
/// * [IntegrityReportType] type (required):
///
/// * [String] cursor:
/// Cursor for pagination
///
/// * [num] limit:
/// Number of items per page
Future<IntegrityReportResponseDto?> getIntegrityReport(IntegrityReportType type, { String? cursor, num? limit, }) async {
final response = await getIntegrityReportWithHttpInfo(type, cursor: cursor, limit: limit, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'IntegrityReportResponseDto',) as IntegrityReportResponseDto;
}
return null;
}
/// Export integrity report by type as CSV
///
/// Get all integrity report entries for a given type as a CSV
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [IntegrityReportType] type (required):
Future<Response> getIntegrityReportCsvWithHttpInfo(IntegrityReportType type,) async {
// ignore: prefer_const_declarations
final apiPath = r'/admin/integrity/report/{type}/csv'
.replaceAll('{type}', type.toString());
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Export integrity report by type as CSV
///
/// Get all integrity report entries for a given type as a CSV
///
/// Parameters:
///
/// * [IntegrityReportType] type (required):
Future<MultipartFile?> getIntegrityReportCsv(IntegrityReportType type,) async {
final response = await getIntegrityReportCsvWithHttpInfo(type,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MultipartFile',) as MultipartFile;
}
return null;
}
/// Download flagged file
///
/// Download the untracked/broken file if one exists
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
Future<Response> getIntegrityReportFileWithHttpInfo(String id,) async {
// ignore: prefer_const_declarations
final apiPath = r'/admin/integrity/report/{id}/file'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Download flagged file
///
/// Download the untracked/broken file if one exists
///
/// Parameters:
///
/// * [String] id (required):
Future<MultipartFile?> getIntegrityReportFile(String id,) async {
final response = await getIntegrityReportFileWithHttpInfo(id,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MultipartFile',) as MultipartFile;
}
return null;
}
/// Get integrity report summary
///
/// Get a count of the items flagged in each integrity report
///
/// Note: This method returns the HTTP [Response].
Future<Response> getIntegrityReportSummaryWithHttpInfo() async {
// ignore: prefer_const_declarations
final apiPath = r'/admin/integrity/summary';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Get integrity report summary
///
/// Get a count of the items flagged in each integrity report
Future<IntegrityReportSummaryResponseDto?> getIntegrityReportSummary() async {
final response = await getIntegrityReportSummaryWithHttpInfo();
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'IntegrityReportSummaryResponseDto',) as IntegrityReportSummaryResponseDto;
}
return null;
}
/// Get maintenance mode status
///
/// Fetch information about the currently running maintenance action.

View File

@@ -410,7 +410,7 @@ class SearchApi {
/// Filter by person IDs
///
/// * [num] rating:
/// Filter by rating
/// Filter by rating [1-5], or null for unrated
///
/// * [num] size:
/// Number of results to return
@@ -633,7 +633,7 @@ class SearchApi {
/// Filter by person IDs
///
/// * [num] rating:
/// Filter by rating
/// Filter by rating [1-5], or null for unrated
///
/// * [num] size:
/// Number of results to return

View File

@@ -386,14 +386,6 @@ class ApiClient {
return FoldersUpdate.fromJson(value);
case 'ImageFormat':
return ImageFormatTypeTransformer().decode(value);
case 'IntegrityReportDto':
return IntegrityReportDto.fromJson(value);
case 'IntegrityReportResponseDto':
return IntegrityReportResponseDto.fromJson(value);
case 'IntegrityReportSummaryResponseDto':
return IntegrityReportSummaryResponseDto.fromJson(value);
case 'IntegrityReportType':
return IntegrityReportTypeTypeTransformer().decode(value);
case 'JobCreateDto':
return JobCreateDto.fromJson(value);
case 'JobName':
@@ -684,6 +676,10 @@ class ApiClient {
return SyncAlbumV1.fromJson(value);
case 'SyncAssetDeleteV1':
return SyncAssetDeleteV1.fromJson(value);
case 'SyncAssetEditDeleteV1':
return SyncAssetEditDeleteV1.fromJson(value);
case 'SyncAssetEditV1':
return SyncAssetEditV1.fromJson(value);
case 'SyncAssetExifV1':
return SyncAssetExifV1.fromJson(value);
case 'SyncAssetFaceDeleteV1':
@@ -748,12 +744,6 @@ class ApiClient {
return SystemConfigGeneratedImageDto.fromJson(value);
case 'SystemConfigImageDto':
return SystemConfigImageDto.fromJson(value);
case 'SystemConfigIntegrityChecks':
return SystemConfigIntegrityChecks.fromJson(value);
case 'SystemConfigIntegrityChecksumJob':
return SystemConfigIntegrityChecksumJob.fromJson(value);
case 'SystemConfigIntegrityJob':
return SystemConfigIntegrityJob.fromJson(value);
case 'SystemConfigJobDto':
return SystemConfigJobDto.fromJson(value);
case 'SystemConfigLibraryDto':

View File

@@ -94,9 +94,6 @@ String parameterToString(dynamic value) {
if (value is ImageFormat) {
return ImageFormatTypeTransformer().encode(value).toString();
}
if (value is IntegrityReportType) {
return IntegrityReportTypeTypeTransformer().encode(value).toString();
}
if (value is JobName) {
return JobNameTypeTransformer().encode(value).toString();
}

View File

@@ -86,16 +86,10 @@ class AssetBulkUpdateDto {
///
num? longitude;
/// Rating
/// Rating in range [1-5], or null for unrated
///
/// Minimum value: -1
/// Maximum value: 5
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
num? rating;
/// Time zone (IANA timezone)
@@ -223,7 +217,9 @@ class AssetBulkUpdateDto {
isFavorite: mapValueOfType<bool>(json, r'isFavorite'),
latitude: num.parse('${json[r'latitude']}'),
longitude: num.parse('${json[r'longitude']}'),
rating: num.parse('${json[r'rating']}'),
rating: json[r'rating'] == null
? null
: num.parse('${json[r'rating']}'),
timeZone: mapValueOfType<String>(json, r'timeZone'),
visibility: AssetVisibility.fromJson(json[r'visibility']),
);

View File

@@ -1,115 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class IntegrityReportDto {
/// Returns a new [IntegrityReportDto] instance.
IntegrityReportDto({
required this.id,
required this.path,
required this.type,
});
String id;
String path;
IntegrityReportType type;
@override
bool operator ==(Object other) => identical(this, other) || other is IntegrityReportDto &&
other.id == id &&
other.path == path &&
other.type == type;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(id.hashCode) +
(path.hashCode) +
(type.hashCode);
@override
String toString() => 'IntegrityReportDto[id=$id, path=$path, type=$type]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'id'] = this.id;
json[r'path'] = this.path;
json[r'type'] = this.type;
return json;
}
/// Returns a new [IntegrityReportDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static IntegrityReportDto? fromJson(dynamic value) {
upgradeDto(value, "IntegrityReportDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return IntegrityReportDto(
id: mapValueOfType<String>(json, r'id')!,
path: mapValueOfType<String>(json, r'path')!,
type: IntegrityReportType.fromJson(json[r'type'])!,
);
}
return null;
}
static List<IntegrityReportDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <IntegrityReportDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = IntegrityReportDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, IntegrityReportDto> mapFromJson(dynamic json) {
final map = <String, IntegrityReportDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = IntegrityReportDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of IntegrityReportDto-objects as value to a dart map
static Map<String, List<IntegrityReportDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<IntegrityReportDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = IntegrityReportDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'id',
'path',
'type',
};
}

View File

@@ -1,116 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class IntegrityReportResponseDto {
/// Returns a new [IntegrityReportResponseDto] instance.
IntegrityReportResponseDto({
this.items = const [],
this.nextCursor,
});
List<IntegrityReportDto> items;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? nextCursor;
@override
bool operator ==(Object other) => identical(this, other) || other is IntegrityReportResponseDto &&
_deepEquality.equals(other.items, items) &&
other.nextCursor == nextCursor;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(items.hashCode) +
(nextCursor == null ? 0 : nextCursor!.hashCode);
@override
String toString() => 'IntegrityReportResponseDto[items=$items, nextCursor=$nextCursor]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'items'] = this.items;
if (this.nextCursor != null) {
json[r'nextCursor'] = this.nextCursor;
} else {
// json[r'nextCursor'] = null;
}
return json;
}
/// Returns a new [IntegrityReportResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static IntegrityReportResponseDto? fromJson(dynamic value) {
upgradeDto(value, "IntegrityReportResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return IntegrityReportResponseDto(
items: IntegrityReportDto.listFromJson(json[r'items']),
nextCursor: mapValueOfType<String>(json, r'nextCursor'),
);
}
return null;
}
static List<IntegrityReportResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <IntegrityReportResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = IntegrityReportResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, IntegrityReportResponseDto> mapFromJson(dynamic json) {
final map = <String, IntegrityReportResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = IntegrityReportResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of IntegrityReportResponseDto-objects as value to a dart map
static Map<String, List<IntegrityReportResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<IntegrityReportResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = IntegrityReportResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'items',
};
}

View File

@@ -1,115 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class IntegrityReportSummaryResponseDto {
/// Returns a new [IntegrityReportSummaryResponseDto] instance.
IntegrityReportSummaryResponseDto({
required this.checksumMismatch,
required this.missingFile,
required this.untrackedFile,
});
int checksumMismatch;
int missingFile;
int untrackedFile;
@override
bool operator ==(Object other) => identical(this, other) || other is IntegrityReportSummaryResponseDto &&
other.checksumMismatch == checksumMismatch &&
other.missingFile == missingFile &&
other.untrackedFile == untrackedFile;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(checksumMismatch.hashCode) +
(missingFile.hashCode) +
(untrackedFile.hashCode);
@override
String toString() => 'IntegrityReportSummaryResponseDto[checksumMismatch=$checksumMismatch, missingFile=$missingFile, untrackedFile=$untrackedFile]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'checksum_mismatch'] = this.checksumMismatch;
json[r'missing_file'] = this.missingFile;
json[r'untracked_file'] = this.untrackedFile;
return json;
}
/// Returns a new [IntegrityReportSummaryResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static IntegrityReportSummaryResponseDto? fromJson(dynamic value) {
upgradeDto(value, "IntegrityReportSummaryResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return IntegrityReportSummaryResponseDto(
checksumMismatch: mapValueOfType<int>(json, r'checksum_mismatch')!,
missingFile: mapValueOfType<int>(json, r'missing_file')!,
untrackedFile: mapValueOfType<int>(json, r'untracked_file')!,
);
}
return null;
}
static List<IntegrityReportSummaryResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <IntegrityReportSummaryResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = IntegrityReportSummaryResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, IntegrityReportSummaryResponseDto> mapFromJson(dynamic json) {
final map = <String, IntegrityReportSummaryResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = IntegrityReportSummaryResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of IntegrityReportSummaryResponseDto-objects as value to a dart map
static Map<String, List<IntegrityReportSummaryResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<IntegrityReportSummaryResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = IntegrityReportSummaryResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'checksum_mismatch',
'missing_file',
'untracked_file',
};
}

View File

@@ -1,88 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class IntegrityReportType {
/// Instantiate a new enum with the provided [value].
const IntegrityReportType._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const untrackedFile = IntegrityReportType._(r'untracked_file');
static const missingFile = IntegrityReportType._(r'missing_file');
static const checksumMismatch = IntegrityReportType._(r'checksum_mismatch');
/// List of all possible values in this [enum][IntegrityReportType].
static const values = <IntegrityReportType>[
untrackedFile,
missingFile,
checksumMismatch,
];
static IntegrityReportType? fromJson(dynamic value) => IntegrityReportTypeTypeTransformer().decode(value);
static List<IntegrityReportType> listFromJson(dynamic json, {bool growable = false,}) {
final result = <IntegrityReportType>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = IntegrityReportType.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [IntegrityReportType] to String,
/// and [decode] dynamic data back to [IntegrityReportType].
class IntegrityReportTypeTypeTransformer {
factory IntegrityReportTypeTypeTransformer() => _instance ??= const IntegrityReportTypeTypeTransformer._();
const IntegrityReportTypeTypeTransformer._();
String encode(IntegrityReportType data) => data.value;
/// Decodes a [dynamic value][data] to a IntegrityReportType.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
IntegrityReportType? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'untracked_file': return IntegrityReportType.untrackedFile;
case r'missing_file': return IntegrityReportType.missingFile;
case r'checksum_mismatch': return IntegrityReportType.checksumMismatch;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [IntegrityReportTypeTypeTransformer] instance.
static IntegrityReportTypeTypeTransformer? _instance;
}

View File

@@ -79,16 +79,6 @@ class JobName {
static const ocrQueueAll = JobName._(r'OcrQueueAll');
static const ocr = JobName._(r'Ocr');
static const workflowRun = JobName._(r'WorkflowRun');
static const integrityUntrackedFilesQueueAll = JobName._(r'IntegrityUntrackedFilesQueueAll');
static const integrityUntrackedFiles = JobName._(r'IntegrityUntrackedFiles');
static const integrityUntrackedRefresh = JobName._(r'IntegrityUntrackedRefresh');
static const integrityMissingFilesQueueAll = JobName._(r'IntegrityMissingFilesQueueAll');
static const integrityMissingFiles = JobName._(r'IntegrityMissingFiles');
static const integrityMissingFilesRefresh = JobName._(r'IntegrityMissingFilesRefresh');
static const integrityChecksumFiles = JobName._(r'IntegrityChecksumFiles');
static const integrityChecksumFilesRefresh = JobName._(r'IntegrityChecksumFilesRefresh');
static const integrityDeleteReportType = JobName._(r'IntegrityDeleteReportType');
static const integrityDeleteReports = JobName._(r'IntegrityDeleteReports');
/// List of all possible values in this [enum][JobName].
static const values = <JobName>[
@@ -148,16 +138,6 @@ class JobName {
ocrQueueAll,
ocr,
workflowRun,
integrityUntrackedFilesQueueAll,
integrityUntrackedFiles,
integrityUntrackedRefresh,
integrityMissingFilesQueueAll,
integrityMissingFiles,
integrityMissingFilesRefresh,
integrityChecksumFiles,
integrityChecksumFilesRefresh,
integrityDeleteReportType,
integrityDeleteReports,
];
static JobName? fromJson(dynamic value) => JobNameTypeTransformer().decode(value);
@@ -252,16 +232,6 @@ class JobNameTypeTransformer {
case r'OcrQueueAll': return JobName.ocrQueueAll;
case r'Ocr': return JobName.ocr;
case r'WorkflowRun': return JobName.workflowRun;
case r'IntegrityUntrackedFilesQueueAll': return JobName.integrityUntrackedFilesQueueAll;
case r'IntegrityUntrackedFiles': return JobName.integrityUntrackedFiles;
case r'IntegrityUntrackedRefresh': return JobName.integrityUntrackedRefresh;
case r'IntegrityMissingFilesQueueAll': return JobName.integrityMissingFilesQueueAll;
case r'IntegrityMissingFiles': return JobName.integrityMissingFiles;
case r'IntegrityMissingFilesRefresh': return JobName.integrityMissingFilesRefresh;
case r'IntegrityChecksumFiles': return JobName.integrityChecksumFiles;
case r'IntegrityChecksumFilesRefresh': return JobName.integrityChecksumFilesRefresh;
case r'IntegrityDeleteReportType': return JobName.integrityDeleteReportType;
case r'IntegrityDeleteReports': return JobName.integrityDeleteReports;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');

View File

@@ -29,15 +29,6 @@ class ManualJobName {
static const memoryCleanup = ManualJobName._(r'memory-cleanup');
static const memoryCreate = ManualJobName._(r'memory-create');
static const backupDatabase = ManualJobName._(r'backup-database');
static const integrityMissingFiles = ManualJobName._(r'integrity-missing-files');
static const integrityUntrackedFiles = ManualJobName._(r'integrity-untracked-files');
static const integrityChecksumMismatch = ManualJobName._(r'integrity-checksum-mismatch');
static const integrityMissingFilesRefresh = ManualJobName._(r'integrity-missing-files-refresh');
static const integrityUntrackedFilesRefresh = ManualJobName._(r'integrity-untracked-files-refresh');
static const integrityChecksumMismatchRefresh = ManualJobName._(r'integrity-checksum-mismatch-refresh');
static const integrityMissingFilesDeleteAll = ManualJobName._(r'integrity-missing-files-delete-all');
static const integrityUntrackedFilesDeleteAll = ManualJobName._(r'integrity-untracked-files-delete-all');
static const integrityChecksumMismatchDeleteAll = ManualJobName._(r'integrity-checksum-mismatch-delete-all');
/// List of all possible values in this [enum][ManualJobName].
static const values = <ManualJobName>[
@@ -47,15 +38,6 @@ class ManualJobName {
memoryCleanup,
memoryCreate,
backupDatabase,
integrityMissingFiles,
integrityUntrackedFiles,
integrityChecksumMismatch,
integrityMissingFilesRefresh,
integrityUntrackedFilesRefresh,
integrityChecksumMismatchRefresh,
integrityMissingFilesDeleteAll,
integrityUntrackedFilesDeleteAll,
integrityChecksumMismatchDeleteAll,
];
static ManualJobName? fromJson(dynamic value) => ManualJobNameTypeTransformer().decode(value);
@@ -100,15 +82,6 @@ class ManualJobNameTypeTransformer {
case r'memory-cleanup': return ManualJobName.memoryCleanup;
case r'memory-create': return ManualJobName.memoryCreate;
case r'backup-database': return ManualJobName.backupDatabase;
case r'integrity-missing-files': return ManualJobName.integrityMissingFiles;
case r'integrity-untracked-files': return ManualJobName.integrityUntrackedFiles;
case r'integrity-checksum-mismatch': return ManualJobName.integrityChecksumMismatch;
case r'integrity-missing-files-refresh': return ManualJobName.integrityMissingFilesRefresh;
case r'integrity-untracked-files-refresh': return ManualJobName.integrityUntrackedFilesRefresh;
case r'integrity-checksum-mismatch-refresh': return ManualJobName.integrityChecksumMismatchRefresh;
case r'integrity-missing-files-delete-all': return ManualJobName.integrityMissingFilesDeleteAll;
case r'integrity-untracked-files-delete-all': return ManualJobName.integrityUntrackedFilesDeleteAll;
case r'integrity-checksum-mismatch-delete-all': return ManualJobName.integrityChecksumMismatchDeleteAll;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');

View File

@@ -256,16 +256,10 @@ class MetadataSearchDto {
///
String? previewPath;
/// Filter by rating
/// Filter by rating [1-5], or null for unrated
///
/// Minimum value: -1
/// Maximum value: 5
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
num? rating;
/// Number of results to return
@@ -754,7 +748,9 @@ class MetadataSearchDto {
? (json[r'personIds'] as Iterable).cast<String>().toList(growable: false)
: const [],
previewPath: mapValueOfType<String>(json, r'previewPath'),
rating: num.parse('${json[r'rating']}'),
rating: json[r'rating'] == null
? null
: num.parse('${json[r'rating']}'),
size: num.parse('${json[r'size']}'),
state: mapValueOfType<String>(json, r'state'),
tagIds: json[r'tagIds'] is Iterable

View File

@@ -40,7 +40,6 @@ class QueueName {
static const backupDatabase = QueueName._(r'backupDatabase');
static const ocr = QueueName._(r'ocr');
static const workflow = QueueName._(r'workflow');
static const integrityCheck = QueueName._(r'integrityCheck');
static const editor = QueueName._(r'editor');
/// List of all possible values in this [enum][QueueName].
@@ -62,7 +61,6 @@ class QueueName {
backupDatabase,
ocr,
workflow,
integrityCheck,
editor,
];
@@ -119,7 +117,6 @@ class QueueNameTypeTransformer {
case r'backupDatabase': return QueueName.backupDatabase;
case r'ocr': return QueueName.ocr;
case r'workflow': return QueueName.workflow;
case r'integrityCheck': return QueueName.integrityCheck;
case r'editor': return QueueName.editor;
default:
if (!allowNull) {

View File

@@ -19,7 +19,6 @@ class QueuesResponseLegacyDto {
required this.editor,
required this.faceDetection,
required this.facialRecognition,
required this.integrityCheck,
required this.library_,
required this.metadataExtraction,
required this.migration,
@@ -46,8 +45,6 @@ class QueuesResponseLegacyDto {
QueueResponseLegacyDto facialRecognition;
QueueResponseLegacyDto integrityCheck;
QueueResponseLegacyDto library_;
QueueResponseLegacyDto metadataExtraction;
@@ -80,7 +77,6 @@ class QueuesResponseLegacyDto {
other.editor == editor &&
other.faceDetection == faceDetection &&
other.facialRecognition == facialRecognition &&
other.integrityCheck == integrityCheck &&
other.library_ == library_ &&
other.metadataExtraction == metadataExtraction &&
other.migration == migration &&
@@ -103,7 +99,6 @@ class QueuesResponseLegacyDto {
(editor.hashCode) +
(faceDetection.hashCode) +
(facialRecognition.hashCode) +
(integrityCheck.hashCode) +
(library_.hashCode) +
(metadataExtraction.hashCode) +
(migration.hashCode) +
@@ -118,7 +113,7 @@ class QueuesResponseLegacyDto {
(workflow.hashCode);
@override
String toString() => 'QueuesResponseLegacyDto[backgroundTask=$backgroundTask, backupDatabase=$backupDatabase, duplicateDetection=$duplicateDetection, editor=$editor, faceDetection=$faceDetection, facialRecognition=$facialRecognition, integrityCheck=$integrityCheck, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, ocr=$ocr, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion, workflow=$workflow]';
String toString() => 'QueuesResponseLegacyDto[backgroundTask=$backgroundTask, backupDatabase=$backupDatabase, duplicateDetection=$duplicateDetection, editor=$editor, faceDetection=$faceDetection, facialRecognition=$facialRecognition, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, ocr=$ocr, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion, workflow=$workflow]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -128,7 +123,6 @@ class QueuesResponseLegacyDto {
json[r'editor'] = this.editor;
json[r'faceDetection'] = this.faceDetection;
json[r'facialRecognition'] = this.facialRecognition;
json[r'integrityCheck'] = this.integrityCheck;
json[r'library'] = this.library_;
json[r'metadataExtraction'] = this.metadataExtraction;
json[r'migration'] = this.migration;
@@ -159,7 +153,6 @@ class QueuesResponseLegacyDto {
editor: QueueResponseLegacyDto.fromJson(json[r'editor'])!,
faceDetection: QueueResponseLegacyDto.fromJson(json[r'faceDetection'])!,
facialRecognition: QueueResponseLegacyDto.fromJson(json[r'facialRecognition'])!,
integrityCheck: QueueResponseLegacyDto.fromJson(json[r'integrityCheck'])!,
library_: QueueResponseLegacyDto.fromJson(json[r'library'])!,
metadataExtraction: QueueResponseLegacyDto.fromJson(json[r'metadataExtraction'])!,
migration: QueueResponseLegacyDto.fromJson(json[r'migration'])!,
@@ -225,7 +218,6 @@ class QueuesResponseLegacyDto {
'editor',
'faceDetection',
'facialRecognition',
'integrityCheck',
'library',
'metadataExtraction',
'migration',

View File

@@ -159,16 +159,10 @@ class RandomSearchDto {
/// Filter by person IDs
List<String> personIds;
/// Filter by rating
/// Filter by rating [1-5], or null for unrated
///
/// Minimum value: -1
/// Maximum value: 5
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
num? rating;
/// Number of results to return
@@ -565,7 +559,9 @@ class RandomSearchDto {
personIds: json[r'personIds'] is Iterable
? (json[r'personIds'] as Iterable).cast<String>().toList(growable: false)
: const [],
rating: num.parse('${json[r'rating']}'),
rating: json[r'rating'] == null
? null
: num.parse('${json[r'rating']}'),
size: num.parse('${json[r'size']}'),
state: mapValueOfType<String>(json, r'state'),
tagIds: json[r'tagIds'] is Iterable

View File

@@ -199,16 +199,10 @@ class SmartSearchDto {
///
String? queryAssetId;
/// Filter by rating
/// Filter by rating [1-5], or null for unrated
///
/// Minimum value: -1
/// Maximum value: 5
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
num? rating;
/// Number of results to return
@@ -605,7 +599,9 @@ class SmartSearchDto {
: const [],
query: mapValueOfType<String>(json, r'query'),
queryAssetId: mapValueOfType<String>(json, r'queryAssetId'),
rating: num.parse('${json[r'rating']}'),
rating: json[r'rating'] == null
? null
: num.parse('${json[r'rating']}'),
size: num.parse('${json[r'size']}'),
state: mapValueOfType<String>(json, r'state'),
tagIds: json[r'tagIds'] is Iterable

View File

@@ -164,16 +164,10 @@ class StatisticsSearchDto {
/// Filter by person IDs
List<String> personIds;
/// Filter by rating
/// Filter by rating [1-5], or null for unrated
///
/// Minimum value: -1
/// Maximum value: 5
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
num? rating;
/// Filter by state/province name
@@ -495,7 +489,9 @@ class StatisticsSearchDto {
personIds: json[r'personIds'] is Iterable
? (json[r'personIds'] as Iterable).cast<String>().toList(growable: false)
: const [],
rating: num.parse('${json[r'rating']}'),
rating: json[r'rating'] == null
? null
: num.parse('${json[r'rating']}'),
state: mapValueOfType<String>(json, r'state'),
tagIds: json[r'tagIds'] is Iterable
? (json[r'tagIds'] as Iterable).cast<String>().toList(growable: false)

View File

@@ -0,0 +1,99 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class SyncAssetEditDeleteV1 {
/// Returns a new [SyncAssetEditDeleteV1] instance.
SyncAssetEditDeleteV1({
required this.editId,
});
String editId;
@override
bool operator ==(Object other) => identical(this, other) || other is SyncAssetEditDeleteV1 &&
other.editId == editId;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(editId.hashCode);
@override
String toString() => 'SyncAssetEditDeleteV1[editId=$editId]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'editId'] = this.editId;
return json;
}
/// Returns a new [SyncAssetEditDeleteV1] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static SyncAssetEditDeleteV1? fromJson(dynamic value) {
upgradeDto(value, "SyncAssetEditDeleteV1");
if (value is Map) {
final json = value.cast<String, dynamic>();
return SyncAssetEditDeleteV1(
editId: mapValueOfType<String>(json, r'editId')!,
);
}
return null;
}
static List<SyncAssetEditDeleteV1> listFromJson(dynamic json, {bool growable = false,}) {
final result = <SyncAssetEditDeleteV1>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = SyncAssetEditDeleteV1.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, SyncAssetEditDeleteV1> mapFromJson(dynamic json) {
final map = <String, SyncAssetEditDeleteV1>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = SyncAssetEditDeleteV1.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of SyncAssetEditDeleteV1-objects as value to a dart map
static Map<String, List<SyncAssetEditDeleteV1>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<SyncAssetEditDeleteV1>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = SyncAssetEditDeleteV1.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'editId',
};
}

View File

@@ -0,0 +1,131 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class SyncAssetEditV1 {
/// Returns a new [SyncAssetEditV1] instance.
SyncAssetEditV1({
required this.action,
required this.assetId,
required this.id,
required this.parameters,
required this.sequence,
});
AssetEditAction action;
String assetId;
String id;
Object parameters;
int sequence;
@override
bool operator ==(Object other) => identical(this, other) || other is SyncAssetEditV1 &&
other.action == action &&
other.assetId == assetId &&
other.id == id &&
other.parameters == parameters &&
other.sequence == sequence;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(action.hashCode) +
(assetId.hashCode) +
(id.hashCode) +
(parameters.hashCode) +
(sequence.hashCode);
@override
String toString() => 'SyncAssetEditV1[action=$action, assetId=$assetId, id=$id, parameters=$parameters, sequence=$sequence]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'action'] = this.action;
json[r'assetId'] = this.assetId;
json[r'id'] = this.id;
json[r'parameters'] = this.parameters;
json[r'sequence'] = this.sequence;
return json;
}
/// Returns a new [SyncAssetEditV1] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static SyncAssetEditV1? fromJson(dynamic value) {
upgradeDto(value, "SyncAssetEditV1");
if (value is Map) {
final json = value.cast<String, dynamic>();
return SyncAssetEditV1(
action: AssetEditAction.fromJson(json[r'action'])!,
assetId: mapValueOfType<String>(json, r'assetId')!,
id: mapValueOfType<String>(json, r'id')!,
parameters: mapValueOfType<Object>(json, r'parameters')!,
sequence: mapValueOfType<int>(json, r'sequence')!,
);
}
return null;
}
static List<SyncAssetEditV1> listFromJson(dynamic json, {bool growable = false,}) {
final result = <SyncAssetEditV1>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = SyncAssetEditV1.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, SyncAssetEditV1> mapFromJson(dynamic json) {
final map = <String, SyncAssetEditV1>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = SyncAssetEditV1.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of SyncAssetEditV1-objects as value to a dart map
static Map<String, List<SyncAssetEditV1>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<SyncAssetEditV1>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = SyncAssetEditV1.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'action',
'assetId',
'id',
'parameters',
'sequence',
};
}

View File

@@ -29,6 +29,8 @@ class SyncEntityType {
static const assetV1 = SyncEntityType._(r'AssetV1');
static const assetDeleteV1 = SyncEntityType._(r'AssetDeleteV1');
static const assetExifV1 = SyncEntityType._(r'AssetExifV1');
static const assetEditV1 = SyncEntityType._(r'AssetEditV1');
static const assetEditDeleteV1 = SyncEntityType._(r'AssetEditDeleteV1');
static const assetMetadataV1 = SyncEntityType._(r'AssetMetadataV1');
static const assetMetadataDeleteV1 = SyncEntityType._(r'AssetMetadataDeleteV1');
static const partnerV1 = SyncEntityType._(r'PartnerV1');
@@ -80,6 +82,8 @@ class SyncEntityType {
assetV1,
assetDeleteV1,
assetExifV1,
assetEditV1,
assetEditDeleteV1,
assetMetadataV1,
assetMetadataDeleteV1,
partnerV1,
@@ -166,6 +170,8 @@ class SyncEntityTypeTypeTransformer {
case r'AssetV1': return SyncEntityType.assetV1;
case r'AssetDeleteV1': return SyncEntityType.assetDeleteV1;
case r'AssetExifV1': return SyncEntityType.assetExifV1;
case r'AssetEditV1': return SyncEntityType.assetEditV1;
case r'AssetEditDeleteV1': return SyncEntityType.assetEditDeleteV1;
case r'AssetMetadataV1': return SyncEntityType.assetMetadataV1;
case r'AssetMetadataDeleteV1': return SyncEntityType.assetMetadataDeleteV1;
case r'PartnerV1': return SyncEntityType.partnerV1;

View File

@@ -30,6 +30,7 @@ class SyncRequestType {
static const albumAssetExifsV1 = SyncRequestType._(r'AlbumAssetExifsV1');
static const assetsV1 = SyncRequestType._(r'AssetsV1');
static const assetExifsV1 = SyncRequestType._(r'AssetExifsV1');
static const assetEditsV1 = SyncRequestType._(r'AssetEditsV1');
static const assetMetadataV1 = SyncRequestType._(r'AssetMetadataV1');
static const authUsersV1 = SyncRequestType._(r'AuthUsersV1');
static const memoriesV1 = SyncRequestType._(r'MemoriesV1');
@@ -54,6 +55,7 @@ class SyncRequestType {
albumAssetExifsV1,
assetsV1,
assetExifsV1,
assetEditsV1,
assetMetadataV1,
authUsersV1,
memoriesV1,
@@ -113,6 +115,7 @@ class SyncRequestTypeTypeTransformer {
case r'AlbumAssetExifsV1': return SyncRequestType.albumAssetExifsV1;
case r'AssetsV1': return SyncRequestType.assetsV1;
case r'AssetExifsV1': return SyncRequestType.assetExifsV1;
case r'AssetEditsV1': return SyncRequestType.assetEditsV1;
case r'AssetMetadataV1': return SyncRequestType.assetMetadataV1;
case r'AuthUsersV1': return SyncRequestType.authUsersV1;
case r'MemoriesV1': return SyncRequestType.memoriesV1;

View File

@@ -16,7 +16,6 @@ class SystemConfigDto {
required this.backup,
required this.ffmpeg,
required this.image,
required this.integrityChecks,
required this.job,
required this.library_,
required this.logging,
@@ -43,8 +42,6 @@ class SystemConfigDto {
SystemConfigImageDto image;
SystemConfigIntegrityChecks integrityChecks;
SystemConfigJobDto job;
SystemConfigLibraryDto library_;
@@ -86,7 +83,6 @@ class SystemConfigDto {
other.backup == backup &&
other.ffmpeg == ffmpeg &&
other.image == image &&
other.integrityChecks == integrityChecks &&
other.job == job &&
other.library_ == library_ &&
other.logging == logging &&
@@ -112,7 +108,6 @@ class SystemConfigDto {
(backup.hashCode) +
(ffmpeg.hashCode) +
(image.hashCode) +
(integrityChecks.hashCode) +
(job.hashCode) +
(library_.hashCode) +
(logging.hashCode) +
@@ -133,14 +128,13 @@ class SystemConfigDto {
(user.hashCode);
@override
String toString() => 'SystemConfigDto[backup=$backup, ffmpeg=$ffmpeg, image=$image, integrityChecks=$integrityChecks, job=$job, library_=$library_, logging=$logging, machineLearning=$machineLearning, map=$map, metadata=$metadata, newVersionCheck=$newVersionCheck, nightlyTasks=$nightlyTasks, notifications=$notifications, oauth=$oauth, passwordLogin=$passwordLogin, reverseGeocoding=$reverseGeocoding, server=$server, storageTemplate=$storageTemplate, templates=$templates, theme=$theme, trash=$trash, user=$user]';
String toString() => 'SystemConfigDto[backup=$backup, ffmpeg=$ffmpeg, image=$image, job=$job, library_=$library_, logging=$logging, machineLearning=$machineLearning, map=$map, metadata=$metadata, newVersionCheck=$newVersionCheck, nightlyTasks=$nightlyTasks, notifications=$notifications, oauth=$oauth, passwordLogin=$passwordLogin, reverseGeocoding=$reverseGeocoding, server=$server, storageTemplate=$storageTemplate, templates=$templates, theme=$theme, trash=$trash, user=$user]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'backup'] = this.backup;
json[r'ffmpeg'] = this.ffmpeg;
json[r'image'] = this.image;
json[r'integrityChecks'] = this.integrityChecks;
json[r'job'] = this.job;
json[r'library'] = this.library_;
json[r'logging'] = this.logging;
@@ -174,7 +168,6 @@ class SystemConfigDto {
backup: SystemConfigBackupsDto.fromJson(json[r'backup'])!,
ffmpeg: SystemConfigFFmpegDto.fromJson(json[r'ffmpeg'])!,
image: SystemConfigImageDto.fromJson(json[r'image'])!,
integrityChecks: SystemConfigIntegrityChecks.fromJson(json[r'integrityChecks'])!,
job: SystemConfigJobDto.fromJson(json[r'job'])!,
library_: SystemConfigLibraryDto.fromJson(json[r'library'])!,
logging: SystemConfigLoggingDto.fromJson(json[r'logging'])!,
@@ -243,7 +236,6 @@ class SystemConfigDto {
'backup',
'ffmpeg',
'image',
'integrityChecks',
'job',
'library',
'logging',

View File

@@ -1,115 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class SystemConfigIntegrityChecks {
/// Returns a new [SystemConfigIntegrityChecks] instance.
SystemConfigIntegrityChecks({
required this.checksumFiles,
required this.missingFiles,
required this.untrackedFiles,
});
SystemConfigIntegrityChecksumJob checksumFiles;
SystemConfigIntegrityJob missingFiles;
SystemConfigIntegrityJob untrackedFiles;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigIntegrityChecks &&
other.checksumFiles == checksumFiles &&
other.missingFiles == missingFiles &&
other.untrackedFiles == untrackedFiles;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(checksumFiles.hashCode) +
(missingFiles.hashCode) +
(untrackedFiles.hashCode);
@override
String toString() => 'SystemConfigIntegrityChecks[checksumFiles=$checksumFiles, missingFiles=$missingFiles, untrackedFiles=$untrackedFiles]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'checksumFiles'] = this.checksumFiles;
json[r'missingFiles'] = this.missingFiles;
json[r'untrackedFiles'] = this.untrackedFiles;
return json;
}
/// Returns a new [SystemConfigIntegrityChecks] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static SystemConfigIntegrityChecks? fromJson(dynamic value) {
upgradeDto(value, "SystemConfigIntegrityChecks");
if (value is Map) {
final json = value.cast<String, dynamic>();
return SystemConfigIntegrityChecks(
checksumFiles: SystemConfigIntegrityChecksumJob.fromJson(json[r'checksumFiles'])!,
missingFiles: SystemConfigIntegrityJob.fromJson(json[r'missingFiles'])!,
untrackedFiles: SystemConfigIntegrityJob.fromJson(json[r'untrackedFiles'])!,
);
}
return null;
}
static List<SystemConfigIntegrityChecks> listFromJson(dynamic json, {bool growable = false,}) {
final result = <SystemConfigIntegrityChecks>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = SystemConfigIntegrityChecks.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, SystemConfigIntegrityChecks> mapFromJson(dynamic json) {
final map = <String, SystemConfigIntegrityChecks>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = SystemConfigIntegrityChecks.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of SystemConfigIntegrityChecks-objects as value to a dart map
static Map<String, List<SystemConfigIntegrityChecks>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<SystemConfigIntegrityChecks>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = SystemConfigIntegrityChecks.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'checksumFiles',
'missingFiles',
'untrackedFiles',
};
}

View File

@@ -1,123 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class SystemConfigIntegrityChecksumJob {
/// Returns a new [SystemConfigIntegrityChecksumJob] instance.
SystemConfigIntegrityChecksumJob({
required this.cronExpression,
required this.enabled,
required this.percentageLimit,
required this.timeLimit,
});
String cronExpression;
bool enabled;
num percentageLimit;
num timeLimit;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigIntegrityChecksumJob &&
other.cronExpression == cronExpression &&
other.enabled == enabled &&
other.percentageLimit == percentageLimit &&
other.timeLimit == timeLimit;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(cronExpression.hashCode) +
(enabled.hashCode) +
(percentageLimit.hashCode) +
(timeLimit.hashCode);
@override
String toString() => 'SystemConfigIntegrityChecksumJob[cronExpression=$cronExpression, enabled=$enabled, percentageLimit=$percentageLimit, timeLimit=$timeLimit]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'cronExpression'] = this.cronExpression;
json[r'enabled'] = this.enabled;
json[r'percentageLimit'] = this.percentageLimit;
json[r'timeLimit'] = this.timeLimit;
return json;
}
/// Returns a new [SystemConfigIntegrityChecksumJob] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static SystemConfigIntegrityChecksumJob? fromJson(dynamic value) {
upgradeDto(value, "SystemConfigIntegrityChecksumJob");
if (value is Map) {
final json = value.cast<String, dynamic>();
return SystemConfigIntegrityChecksumJob(
cronExpression: mapValueOfType<String>(json, r'cronExpression')!,
enabled: mapValueOfType<bool>(json, r'enabled')!,
percentageLimit: num.parse('${json[r'percentageLimit']}'),
timeLimit: num.parse('${json[r'timeLimit']}'),
);
}
return null;
}
static List<SystemConfigIntegrityChecksumJob> listFromJson(dynamic json, {bool growable = false,}) {
final result = <SystemConfigIntegrityChecksumJob>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = SystemConfigIntegrityChecksumJob.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, SystemConfigIntegrityChecksumJob> mapFromJson(dynamic json) {
final map = <String, SystemConfigIntegrityChecksumJob>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = SystemConfigIntegrityChecksumJob.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of SystemConfigIntegrityChecksumJob-objects as value to a dart map
static Map<String, List<SystemConfigIntegrityChecksumJob>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<SystemConfigIntegrityChecksumJob>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = SystemConfigIntegrityChecksumJob.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'cronExpression',
'enabled',
'percentageLimit',
'timeLimit',
};
}

View File

@@ -1,107 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class SystemConfigIntegrityJob {
/// Returns a new [SystemConfigIntegrityJob] instance.
SystemConfigIntegrityJob({
required this.cronExpression,
required this.enabled,
});
String cronExpression;
bool enabled;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigIntegrityJob &&
other.cronExpression == cronExpression &&
other.enabled == enabled;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(cronExpression.hashCode) +
(enabled.hashCode);
@override
String toString() => 'SystemConfigIntegrityJob[cronExpression=$cronExpression, enabled=$enabled]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'cronExpression'] = this.cronExpression;
json[r'enabled'] = this.enabled;
return json;
}
/// Returns a new [SystemConfigIntegrityJob] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static SystemConfigIntegrityJob? fromJson(dynamic value) {
upgradeDto(value, "SystemConfigIntegrityJob");
if (value is Map) {
final json = value.cast<String, dynamic>();
return SystemConfigIntegrityJob(
cronExpression: mapValueOfType<String>(json, r'cronExpression')!,
enabled: mapValueOfType<bool>(json, r'enabled')!,
);
}
return null;
}
static List<SystemConfigIntegrityJob> listFromJson(dynamic json, {bool growable = false,}) {
final result = <SystemConfigIntegrityJob>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = SystemConfigIntegrityJob.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, SystemConfigIntegrityJob> mapFromJson(dynamic json) {
final map = <String, SystemConfigIntegrityJob>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = SystemConfigIntegrityJob.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of SystemConfigIntegrityJob-objects as value to a dart map
static Map<String, List<SystemConfigIntegrityJob>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<SystemConfigIntegrityJob>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = SystemConfigIntegrityJob.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'cronExpression',
'enabled',
};
}

View File

@@ -16,7 +16,6 @@ class SystemConfigJobDto {
required this.backgroundTask,
required this.editor,
required this.faceDetection,
required this.integrityCheck,
required this.library_,
required this.metadataExtraction,
required this.migration,
@@ -36,8 +35,6 @@ class SystemConfigJobDto {
JobSettingsDto faceDetection;
JobSettingsDto integrityCheck;
JobSettingsDto library_;
JobSettingsDto metadataExtraction;
@@ -65,7 +62,6 @@ class SystemConfigJobDto {
other.backgroundTask == backgroundTask &&
other.editor == editor &&
other.faceDetection == faceDetection &&
other.integrityCheck == integrityCheck &&
other.library_ == library_ &&
other.metadataExtraction == metadataExtraction &&
other.migration == migration &&
@@ -84,7 +80,6 @@ class SystemConfigJobDto {
(backgroundTask.hashCode) +
(editor.hashCode) +
(faceDetection.hashCode) +
(integrityCheck.hashCode) +
(library_.hashCode) +
(metadataExtraction.hashCode) +
(migration.hashCode) +
@@ -98,14 +93,13 @@ class SystemConfigJobDto {
(workflow.hashCode);
@override
String toString() => 'SystemConfigJobDto[backgroundTask=$backgroundTask, editor=$editor, faceDetection=$faceDetection, integrityCheck=$integrityCheck, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, ocr=$ocr, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion, workflow=$workflow]';
String toString() => 'SystemConfigJobDto[backgroundTask=$backgroundTask, editor=$editor, faceDetection=$faceDetection, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, ocr=$ocr, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion, workflow=$workflow]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'backgroundTask'] = this.backgroundTask;
json[r'editor'] = this.editor;
json[r'faceDetection'] = this.faceDetection;
json[r'integrityCheck'] = this.integrityCheck;
json[r'library'] = this.library_;
json[r'metadataExtraction'] = this.metadataExtraction;
json[r'migration'] = this.migration;
@@ -132,7 +126,6 @@ class SystemConfigJobDto {
backgroundTask: JobSettingsDto.fromJson(json[r'backgroundTask'])!,
editor: JobSettingsDto.fromJson(json[r'editor'])!,
faceDetection: JobSettingsDto.fromJson(json[r'faceDetection'])!,
integrityCheck: JobSettingsDto.fromJson(json[r'integrityCheck'])!,
library_: JobSettingsDto.fromJson(json[r'library'])!,
metadataExtraction: JobSettingsDto.fromJson(json[r'metadataExtraction'])!,
migration: JobSettingsDto.fromJson(json[r'migration'])!,
@@ -194,7 +187,6 @@ class SystemConfigJobDto {
'backgroundTask',
'editor',
'faceDetection',
'integrityCheck',
'library',
'metadataExtraction',
'migration',

View File

@@ -71,16 +71,10 @@ class UpdateAssetDto {
///
num? longitude;
/// Rating
/// Rating in range [1-5], or null for unrated
///
/// Minimum value: -1
/// Maximum value: 5
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
num? rating;
/// Asset visibility
@@ -178,7 +172,9 @@ class UpdateAssetDto {
latitude: num.parse('${json[r'latitude']}'),
livePhotoVideoId: mapValueOfType<String>(json, r'livePhotoVideoId'),
longitude: num.parse('${json[r'longitude']}'),
rating: num.parse('${json[r'rating']}'),
rating: json[r'rating'] == null
? null
: num.parse('${json[r'rating']}'),
visibility: AssetVisibility.fromJson(json[r'visibility']),
);
}

View File

@@ -560,296 +560,6 @@
"x-immich-state": "Alpha"
}
},
"/admin/integrity/report": {
"get": {
"description": "Get all flagged items by integrity report type",
"operationId": "getIntegrityReport",
"parameters": [
{
"name": "cursor",
"required": false,
"in": "query",
"description": "Cursor for pagination",
"schema": {
"format": "uuid",
"default": 1,
"type": "string"
}
},
{
"name": "limit",
"required": false,
"in": "query",
"description": "Number of items per page",
"schema": {
"minimum": 1,
"default": 500,
"type": "number"
}
},
{
"name": "type",
"required": true,
"in": "query",
"schema": {
"$ref": "#/components/schemas/IntegrityReportType"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/IntegrityReportResponseDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Get integrity report by type",
"tags": [
"Maintenance (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.6.0",
"state": "Added"
},
{
"version": "v2.6.0",
"state": "Alpha"
}
],
"x-immich-permission": "maintenance",
"x-immich-state": "Alpha"
}
},
"/admin/integrity/report/{id}": {
"delete": {
"description": "Delete a given report item and perform corresponding deletion (e.g. trash asset, delete file)",
"operationId": "deleteIntegrityReport",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
}
],
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Delete integrity report item",
"tags": [
"Maintenance (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.6.0",
"state": "Added"
},
{
"version": "v2.6.0",
"state": "Alpha"
}
],
"x-immich-permission": "maintenance",
"x-immich-state": "Alpha"
}
},
"/admin/integrity/report/{id}/file": {
"get": {
"description": "Download the untracked/broken file if one exists",
"operationId": "getIntegrityReportFile",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/octet-stream": {
"schema": {
"format": "binary",
"type": "string"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Download flagged file",
"tags": [
"Maintenance (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.6.0",
"state": "Added"
},
{
"version": "v2.6.0",
"state": "Alpha"
}
],
"x-immich-permission": "maintenance",
"x-immich-state": "Alpha"
}
},
"/admin/integrity/report/{type}/csv": {
"get": {
"description": "Get all integrity report entries for a given type as a CSV",
"operationId": "getIntegrityReportCsv",
"parameters": [
{
"name": "type",
"required": true,
"in": "path",
"schema": {
"$ref": "#/components/schemas/IntegrityReportType"
}
}
],
"responses": {
"200": {
"content": {
"application/octet-stream": {
"schema": {
"format": "binary",
"type": "string"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Export integrity report by type as CSV",
"tags": [
"Maintenance (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.6.0",
"state": "Added"
},
{
"version": "v2.6.0",
"state": "Alpha"
}
],
"x-immich-permission": "maintenance",
"x-immich-state": "Alpha"
}
},
"/admin/integrity/summary": {
"get": {
"description": "Get a count of the items flagged in each integrity report",
"operationId": "getIntegrityReportSummary",
"parameters": [],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/IntegrityReportSummaryResponseDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Get integrity report summary",
"tags": [
"Maintenance (admin)"
],
"x-immich-admin-only": true,
"x-immich-history": [
{
"version": "v2.6.0",
"state": "Added"
},
{
"version": "v2.6.0",
"state": "Alpha"
}
],
"x-immich-permission": "maintenance",
"x-immich-state": "Alpha"
}
},
"/admin/maintenance": {
"post": {
"description": "Put Immich into or take it out of maintenance mode",
@@ -9697,10 +9407,27 @@
"name": "rating",
"required": false,
"in": "query",
"description": "Filter by rating",
"description": "Filter by rating [1-5], or null for unrated",
"x-immich-history": [
{
"version": "v1",
"state": "Added"
},
{
"version": "v2",
"state": "Stable"
},
{
"version": "v2.6.0",
"state": "Updated",
"description": "Using -1 as a rating is deprecated and will be removed in the next major version."
}
],
"x-immich-state": "Stable",
"schema": {
"minimum": -1,
"maximum": 5,
"nullable": true,
"type": "number"
}
},
@@ -15482,10 +15209,6 @@
"name": "Faces",
"description": "A face is a detected human face within an asset, which can be associated with a person. Faces are normally detected via machine learning, but can also be created via manually."
},
{
"name": "Integrity (admin)",
"description": "Endpoints for viewing and managing integrity reports."
},
{
"name": "Jobs",
"description": "Queues and background jobs are used for processing tasks asynchronously. Queues can be paused and resumed as needed."
@@ -16166,10 +15889,27 @@
"type": "number"
},
"rating": {
"description": "Rating",
"description": "Rating in range [1-5], or null for unrated",
"maximum": 5,
"minimum": -1,
"type": "number"
"nullable": true,
"type": "number",
"x-immich-history": [
{
"version": "v1",
"state": "Added"
},
{
"version": "v2",
"state": "Stable"
},
{
"version": "v2.6.0",
"state": "Updated",
"description": "Using -1 as a rating is deprecated and will be removed in the next major version."
}
],
"x-immich-state": "Stable"
},
"timeZone": {
"description": "Time zone (IANA timezone)",
@@ -18373,73 +18113,6 @@
],
"type": "string"
},
"IntegrityReportDto": {
"properties": {
"id": {
"type": "string"
},
"path": {
"type": "string"
},
"type": {
"allOf": [
{
"$ref": "#/components/schemas/IntegrityReportType"
}
]
}
},
"required": [
"id",
"path",
"type"
],
"type": "object"
},
"IntegrityReportResponseDto": {
"properties": {
"items": {
"items": {
"$ref": "#/components/schemas/IntegrityReportDto"
},
"type": "array"
},
"nextCursor": {
"type": "string"
}
},
"required": [
"items"
],
"type": "object"
},
"IntegrityReportSummaryResponseDto": {
"properties": {
"checksum_mismatch": {
"type": "integer"
},
"missing_file": {
"type": "integer"
},
"untracked_file": {
"type": "integer"
}
},
"required": [
"checksum_mismatch",
"missing_file",
"untracked_file"
],
"type": "object"
},
"IntegrityReportType": {
"enum": [
"untracked_file",
"missing_file",
"checksum_mismatch"
],
"type": "string"
},
"JobCreateDto": {
"properties": {
"name": {
@@ -18514,17 +18187,7 @@
"VersionCheck",
"OcrQueueAll",
"Ocr",
"WorkflowRun",
"IntegrityUntrackedFilesQueueAll",
"IntegrityUntrackedFiles",
"IntegrityUntrackedRefresh",
"IntegrityMissingFilesQueueAll",
"IntegrityMissingFiles",
"IntegrityMissingFilesRefresh",
"IntegrityChecksumFiles",
"IntegrityChecksumFilesRefresh",
"IntegrityDeleteReportType",
"IntegrityDeleteReports"
"WorkflowRun"
],
"type": "string"
},
@@ -18905,16 +18568,7 @@
"user-cleanup",
"memory-cleanup",
"memory-create",
"backup-database",
"integrity-missing-files",
"integrity-untracked-files",
"integrity-checksum-mismatch",
"integrity-missing-files-refresh",
"integrity-untracked-files-refresh",
"integrity-checksum-mismatch-refresh",
"integrity-missing-files-delete-all",
"integrity-untracked-files-delete-all",
"integrity-checksum-mismatch-delete-all"
"backup-database"
],
"type": "string"
},
@@ -19368,10 +19022,27 @@
"type": "string"
},
"rating": {
"description": "Filter by rating",
"description": "Filter by rating [1-5], or null for unrated",
"maximum": 5,
"minimum": -1,
"type": "number"
"nullable": true,
"type": "number",
"x-immich-history": [
{
"version": "v1",
"state": "Added"
},
{
"version": "v2",
"state": "Stable"
},
{
"version": "v2.6.0",
"state": "Updated",
"description": "Using -1 as a rating is deprecated and will be removed in the next major version."
}
],
"x-immich-state": "Stable"
},
"size": {
"description": "Number of results to return",
@@ -20821,7 +20492,6 @@
"backupDatabase",
"ocr",
"workflow",
"integrityCheck",
"editor"
],
"type": "string"
@@ -20949,9 +20619,6 @@
"facialRecognition": {
"$ref": "#/components/schemas/QueueResponseLegacyDto"
},
"integrityCheck": {
"$ref": "#/components/schemas/QueueResponseLegacyDto"
},
"library": {
"$ref": "#/components/schemas/QueueResponseLegacyDto"
},
@@ -20996,7 +20663,6 @@
"editor",
"faceDetection",
"facialRecognition",
"integrityCheck",
"library",
"metadataExtraction",
"migration",
@@ -21099,10 +20765,27 @@
"type": "array"
},
"rating": {
"description": "Filter by rating",
"description": "Filter by rating [1-5], or null for unrated",
"maximum": 5,
"minimum": -1,
"type": "number"
"nullable": true,
"type": "number",
"x-immich-history": [
{
"version": "v1",
"state": "Added"
},
{
"version": "v2",
"state": "Stable"
},
{
"version": "v2.6.0",
"state": "Updated",
"description": "Using -1 as a rating is deprecated and will be removed in the next major version."
}
],
"x-immich-state": "Stable"
},
"size": {
"description": "Number of results to return",
@@ -22473,10 +22156,27 @@
"type": "string"
},
"rating": {
"description": "Filter by rating",
"description": "Filter by rating [1-5], or null for unrated",
"maximum": 5,
"minimum": -1,
"type": "number"
"nullable": true,
"type": "number",
"x-immich-history": [
{
"version": "v1",
"state": "Added"
},
{
"version": "v2",
"state": "Stable"
},
{
"version": "v2.6.0",
"state": "Updated",
"description": "Using -1 as a rating is deprecated and will be removed in the next major version."
}
],
"x-immich-state": "Stable"
},
"size": {
"description": "Number of results to return",
@@ -22707,10 +22407,27 @@
"type": "array"
},
"rating": {
"description": "Filter by rating",
"description": "Filter by rating [1-5], or null for unrated",
"maximum": 5,
"minimum": -1,
"type": "number"
"nullable": true,
"type": "number",
"x-immich-history": [
{
"version": "v1",
"state": "Added"
},
{
"version": "v2",
"state": "Stable"
},
{
"version": "v2.6.0",
"state": "Updated",
"description": "Using -1 as a rating is deprecated and will be removed in the next major version."
}
],
"x-immich-state": "Stable"
},
"state": {
"description": "Filter by state/province name",
@@ -22999,6 +22716,48 @@
],
"type": "object"
},
"SyncAssetEditDeleteV1": {
"properties": {
"editId": {
"type": "string"
}
},
"required": [
"editId"
],
"type": "object"
},
"SyncAssetEditV1": {
"properties": {
"action": {
"allOf": [
{
"$ref": "#/components/schemas/AssetEditAction"
}
]
},
"assetId": {
"type": "string"
},
"id": {
"type": "string"
},
"parameters": {
"type": "object"
},
"sequence": {
"type": "integer"
}
},
"required": [
"action",
"assetId",
"id",
"parameters",
"sequence"
],
"type": "object"
},
"SyncAssetExifV1": {
"properties": {
"assetId": {
@@ -23548,6 +23307,8 @@
"AssetV1",
"AssetDeleteV1",
"AssetExifV1",
"AssetEditV1",
"AssetEditDeleteV1",
"AssetMetadataV1",
"AssetMetadataDeleteV1",
"PartnerV1",
@@ -23845,6 +23606,7 @@
"AlbumAssetExifsV1",
"AssetsV1",
"AssetExifsV1",
"AssetEditsV1",
"AssetMetadataV1",
"AuthUsersV1",
"MemoriesV1",
@@ -24062,9 +23824,6 @@
"image": {
"$ref": "#/components/schemas/SystemConfigImageDto"
},
"integrityChecks": {
"$ref": "#/components/schemas/SystemConfigIntegrityChecks"
},
"job": {
"$ref": "#/components/schemas/SystemConfigJobDto"
},
@@ -24124,7 +23883,6 @@
"backup",
"ffmpeg",
"image",
"integrityChecks",
"job",
"library",
"logging",
@@ -24410,63 +24168,6 @@
],
"type": "object"
},
"SystemConfigIntegrityChecks": {
"properties": {
"checksumFiles": {
"$ref": "#/components/schemas/SystemConfigIntegrityChecksumJob"
},
"missingFiles": {
"$ref": "#/components/schemas/SystemConfigIntegrityJob"
},
"untrackedFiles": {
"$ref": "#/components/schemas/SystemConfigIntegrityJob"
}
},
"required": [
"checksumFiles",
"missingFiles",
"untrackedFiles"
],
"type": "object"
},
"SystemConfigIntegrityChecksumJob": {
"properties": {
"cronExpression": {
"type": "string"
},
"enabled": {
"type": "boolean"
},
"percentageLimit": {
"type": "number"
},
"timeLimit": {
"type": "number"
}
},
"required": [
"cronExpression",
"enabled",
"percentageLimit",
"timeLimit"
],
"type": "object"
},
"SystemConfigIntegrityJob": {
"properties": {
"cronExpression": {
"type": "string"
},
"enabled": {
"type": "boolean"
}
},
"required": [
"cronExpression",
"enabled"
],
"type": "object"
},
"SystemConfigJobDto": {
"properties": {
"backgroundTask": {
@@ -24478,9 +24179,6 @@
"faceDetection": {
"$ref": "#/components/schemas/JobSettingsDto"
},
"integrityCheck": {
"$ref": "#/components/schemas/JobSettingsDto"
},
"library": {
"$ref": "#/components/schemas/JobSettingsDto"
},
@@ -24519,7 +24217,6 @@
"backgroundTask",
"editor",
"faceDetection",
"integrityCheck",
"library",
"metadataExtraction",
"migration",
@@ -25614,10 +25311,27 @@
"type": "number"
},
"rating": {
"description": "Rating",
"description": "Rating in range [1-5], or null for unrated",
"maximum": 5,
"minimum": -1,
"type": "number"
"nullable": true,
"type": "number",
"x-immich-history": [
{
"version": "v1",
"state": "Added"
},
{
"version": "v2",
"state": "Stable"
},
{
"version": "v2.6.0",
"state": "Updated",
"description": "Using -1 as a rating is deprecated and will be removed in the next major version."
}
],
"x-immich-state": "Stable"
},
"visibility": {
"allOf": [

View File

@@ -70,20 +70,6 @@ export type DatabaseBackupListResponseDto = {
export type DatabaseBackupUploadDto = {
file?: Blob;
};
export type IntegrityReportDto = {
id: string;
path: string;
"type": IntegrityReportType;
};
export type IntegrityReportResponseDto = {
items: IntegrityReportDto[];
nextCursor?: string;
};
export type IntegrityReportSummaryResponseDto = {
checksum_mismatch: number;
missing_file: number;
untracked_file: number;
};
export type SetMaintenanceModeDto = {
/** Maintenance action */
action: MaintenanceAction;
@@ -848,8 +834,8 @@ export type AssetBulkUpdateDto = {
latitude?: number;
/** Longitude coordinate */
longitude?: number;
/** Rating */
rating?: number;
/** Rating in range [1-5], or null for unrated */
rating?: number | null;
/** Time zone (IANA timezone) */
timeZone?: string;
/** Asset visibility */
@@ -958,8 +944,8 @@ export type UpdateAssetDto = {
livePhotoVideoId?: string | null;
/** Longitude coordinate */
longitude?: number;
/** Rating */
rating?: number;
/** Rating in range [1-5], or null for unrated */
rating?: number | null;
/** Asset visibility */
visibility?: AssetVisibility;
};
@@ -1273,7 +1259,6 @@ export type QueuesResponseLegacyDto = {
editor: QueueResponseLegacyDto;
faceDetection: QueueResponseLegacyDto;
facialRecognition: QueueResponseLegacyDto;
integrityCheck: QueueResponseLegacyDto;
library: QueueResponseLegacyDto;
metadataExtraction: QueueResponseLegacyDto;
migration: QueueResponseLegacyDto;
@@ -1726,8 +1711,8 @@ export type MetadataSearchDto = {
personIds?: string[];
/** Filter by preview file path */
previewPath?: string;
/** Filter by rating */
rating?: number;
/** Filter by rating [1-5], or null for unrated */
rating?: number | null;
/** Number of results to return */
size?: number;
/** Filter by state/province name */
@@ -1842,8 +1827,8 @@ export type RandomSearchDto = {
ocr?: string;
/** Filter by person IDs */
personIds?: string[];
/** Filter by rating */
rating?: number;
/** Filter by rating [1-5], or null for unrated */
rating?: number | null;
/** Number of results to return */
size?: number;
/** Filter by state/province name */
@@ -1918,8 +1903,8 @@ export type SmartSearchDto = {
query?: string;
/** Asset ID to use as search reference */
queryAssetId?: string;
/** Filter by rating */
rating?: number;
/** Filter by rating [1-5], or null for unrated */
rating?: number | null;
/** Number of results to return */
size?: number;
/** Filter by state/province name */
@@ -1984,8 +1969,8 @@ export type StatisticsSearchDto = {
ocr?: string;
/** Filter by person IDs */
personIds?: string[];
/** Filter by rating */
rating?: number;
/** Filter by rating [1-5], or null for unrated */
rating?: number | null;
/** Filter by state/province name */
state?: string | null;
/** Filter by tag IDs */
@@ -2483,21 +2468,6 @@ export type SystemConfigImageDto = {
preview: SystemConfigGeneratedImageDto;
thumbnail: SystemConfigGeneratedImageDto;
};
export type SystemConfigIntegrityChecksumJob = {
cronExpression: string;
enabled: boolean;
percentageLimit: number;
timeLimit: number;
};
export type SystemConfigIntegrityJob = {
cronExpression: string;
enabled: boolean;
};
export type SystemConfigIntegrityChecks = {
checksumFiles: SystemConfigIntegrityChecksumJob;
missingFiles: SystemConfigIntegrityJob;
untrackedFiles: SystemConfigIntegrityJob;
};
export type JobSettingsDto = {
/** Concurrency */
concurrency: number;
@@ -2506,7 +2476,6 @@ export type SystemConfigJobDto = {
backgroundTask: JobSettingsDto;
editor: JobSettingsDto;
faceDetection: JobSettingsDto;
integrityCheck: JobSettingsDto;
library: JobSettingsDto;
metadataExtraction: JobSettingsDto;
migration: JobSettingsDto;
@@ -2709,7 +2678,6 @@ export type SystemConfigDto = {
backup: SystemConfigBackupsDto;
ffmpeg: SystemConfigFFmpegDto;
image: SystemConfigImageDto;
integrityChecks: SystemConfigIntegrityChecks;
job: SystemConfigJobDto;
library: SystemConfigLibraryDto;
logging: SystemConfigLoggingDto;
@@ -2999,6 +2967,16 @@ export type SyncAssetDeleteV1 = {
/** Asset ID */
assetId: string;
};
export type SyncAssetEditDeleteV1 = {
editId: string;
};
export type SyncAssetEditV1 = {
action: AssetEditAction;
assetId: string;
id: string;
parameters: object;
sequence: number;
};
export type SyncAssetExifV1 = {
/** Asset ID */
assetId: string;
@@ -3437,73 +3415,6 @@ export function downloadDatabaseBackup({ filename }: {
...opts
}));
}
/**
* Get integrity report by type
*/
export function getIntegrityReport({ cursor, limit, $type }: {
cursor?: string;
limit?: number;
$type: IntegrityReportType;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: IntegrityReportResponseDto;
}>(`/admin/integrity/report${QS.query(QS.explode({
cursor,
limit,
"type": $type
}))}`, {
...opts
}));
}
/**
* Delete integrity report item
*/
export function deleteIntegrityReport({ id }: {
id: string;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/admin/integrity/report/${encodeURIComponent(id)}`, {
...opts,
method: "DELETE"
}));
}
/**
* Download flagged file
*/
export function getIntegrityReportFile({ id }: {
id: string;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchBlob<{
status: 200;
data: Blob;
}>(`/admin/integrity/report/${encodeURIComponent(id)}/file`, {
...opts
}));
}
/**
* Export integrity report by type as CSV
*/
export function getIntegrityReportCsv({ $type }: {
$type: IntegrityReportType;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchBlob<{
status: 200;
data: Blob;
}>(`/admin/integrity/report/${encodeURIComponent($type)}/csv`, {
...opts
}));
}
/**
* Get integrity report summary
*/
export function getIntegrityReportSummary(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: IntegrityReportSummaryResponseDto;
}>("/admin/integrity/summary", {
...opts
}));
}
/**
* Set maintenance mode
*/
@@ -5543,7 +5454,7 @@ export function searchLargeAssets({ albumIds, city, country, createdAfter, creat
model?: string | null;
ocr?: string;
personIds?: string[];
rating?: number;
rating?: number | null;
size?: number;
state?: string | null;
tagIds?: string[] | null;
@@ -6921,11 +6832,6 @@ export enum UserAvatarColor {
Gray = "gray",
Amber = "amber"
}
export enum IntegrityReportType {
UntrackedFile = "untracked_file",
MissingFile = "missing_file",
ChecksumMismatch = "checksum_mismatch"
}
export enum MaintenanceAction {
Start = "start",
End = "end",
@@ -7194,16 +7100,7 @@ export enum ManualJobName {
UserCleanup = "user-cleanup",
MemoryCleanup = "memory-cleanup",
MemoryCreate = "memory-create",
BackupDatabase = "backup-database",
IntegrityMissingFiles = "integrity-missing-files",
IntegrityUntrackedFiles = "integrity-untracked-files",
IntegrityChecksumMismatch = "integrity-checksum-mismatch",
IntegrityMissingFilesRefresh = "integrity-missing-files-refresh",
IntegrityUntrackedFilesRefresh = "integrity-untracked-files-refresh",
IntegrityChecksumMismatchRefresh = "integrity-checksum-mismatch-refresh",
IntegrityMissingFilesDeleteAll = "integrity-missing-files-delete-all",
IntegrityUntrackedFilesDeleteAll = "integrity-untracked-files-delete-all",
IntegrityChecksumMismatchDeleteAll = "integrity-checksum-mismatch-delete-all"
BackupDatabase = "backup-database"
}
export enum QueueName {
ThumbnailGeneration = "thumbnailGeneration",
@@ -7223,7 +7120,6 @@ export enum QueueName {
BackupDatabase = "backupDatabase",
Ocr = "ocr",
Workflow = "workflow",
IntegrityCheck = "integrityCheck",
Editor = "editor"
}
export enum QueueCommand {
@@ -7318,17 +7214,7 @@ export enum JobName {
VersionCheck = "VersionCheck",
OcrQueueAll = "OcrQueueAll",
Ocr = "Ocr",
WorkflowRun = "WorkflowRun",
IntegrityUntrackedFilesQueueAll = "IntegrityUntrackedFilesQueueAll",
IntegrityUntrackedFiles = "IntegrityUntrackedFiles",
IntegrityUntrackedRefresh = "IntegrityUntrackedRefresh",
IntegrityMissingFilesQueueAll = "IntegrityMissingFilesQueueAll",
IntegrityMissingFiles = "IntegrityMissingFiles",
IntegrityMissingFilesRefresh = "IntegrityMissingFilesRefresh",
IntegrityChecksumFiles = "IntegrityChecksumFiles",
IntegrityChecksumFilesRefresh = "IntegrityChecksumFilesRefresh",
IntegrityDeleteReportType = "IntegrityDeleteReportType",
IntegrityDeleteReports = "IntegrityDeleteReports"
WorkflowRun = "WorkflowRun"
}
export enum SearchSuggestionType {
Country = "country",
@@ -7354,6 +7240,8 @@ export enum SyncEntityType {
AssetV1 = "AssetV1",
AssetDeleteV1 = "AssetDeleteV1",
AssetExifV1 = "AssetExifV1",
AssetEditV1 = "AssetEditV1",
AssetEditDeleteV1 = "AssetEditDeleteV1",
AssetMetadataV1 = "AssetMetadataV1",
AssetMetadataDeleteV1 = "AssetMetadataDeleteV1",
PartnerV1 = "PartnerV1",
@@ -7405,6 +7293,7 @@ export enum SyncRequestType {
AlbumAssetExifsV1 = "AlbumAssetExifsV1",
AssetsV1 = "AssetsV1",
AssetExifsV1 = "AssetExifsV1",
AssetEditsV1 = "AssetEditsV1",
AssetMetadataV1 = "AssetMetadataV1",
AuthUsersV1 = "AuthUsersV1",
MemoriesV1 = "MemoriesV1",

View File

@@ -46,22 +46,6 @@ export type SystemConfig = {
accelDecode: boolean;
tonemap: ToneMapping;
};
integrityChecks: {
missingFiles: {
enabled: boolean;
cronExpression: string;
};
untrackedFiles: {
enabled: boolean;
cronExpression: string;
};
checksumFiles: {
enabled: boolean;
cronExpression: string;
timeLimit: number;
percentageLimit: number;
};
};
job: Record<ConcurrentQueueName, { concurrency: number }>;
logging: {
enabled: boolean;
@@ -238,22 +222,6 @@ export const defaults = Object.freeze<SystemConfig>({
accel: TranscodeHardwareAcceleration.Disabled,
accelDecode: false,
},
integrityChecks: {
missingFiles: {
enabled: true,
cronExpression: CronExpression.EVERY_DAY_AT_3AM,
},
untrackedFiles: {
enabled: true,
cronExpression: CronExpression.EVERY_DAY_AT_3AM,
},
checksumFiles: {
enabled: true,
cronExpression: CronExpression.EVERY_DAY_AT_3AM,
timeLimit: 60 * 60 * 1000, // 1 hour
percentageLimit: 1, // 100% of assets
},
},
job: {
[QueueName.BackgroundTask]: { concurrency: 5 },
[QueueName.SmartSearch]: { concurrency: 2 },
@@ -268,7 +236,6 @@ export const defaults = Object.freeze<SystemConfig>({
[QueueName.Notification]: { concurrency: 5 },
[QueueName.Ocr]: { concurrency: 1 },
[QueueName.Workflow]: { concurrency: 5 },
[QueueName.IntegrityCheck]: { concurrency: 1 },
[QueueName.Editor]: { concurrency: 2 },
},
logging: {

View File

@@ -154,7 +154,6 @@ export const endpointTags: Record<ApiTag, string> = {
[ApiTag.Duplicates]: 'Endpoints for managing and identifying duplicate assets.',
[ApiTag.Faces]:
'A face is a detected human face within an asset, which can be associated with a person. Faces are normally detected via machine learning, but can also be created via manually.',
[ApiTag.Integrity]: 'Endpoints for viewing and managing integrity reports.',
[ApiTag.Jobs]:
'Queues and background jobs are used for processing tasks asynchronously. Queues can be paused and resumed as needed.',
[ApiTag.Libraries]:

View File

@@ -207,12 +207,28 @@ describe(AssetController.name, () => {
});
it('should reject invalid rating', async () => {
for (const test of [{ rating: 7 }, { rating: 3.5 }, { rating: null }]) {
for (const test of [{ rating: 7 }, { rating: 3.5 }, { rating: -2 }]) {
const { status, body } = await request(ctx.getHttpServer()).put(`/assets/${factory.uuid()}`).send(test);
expect(status).toBe(400);
expect(body).toEqual(factory.responses.badRequest());
}
});
it('should convert rating 0 to null', async () => {
const assetId = factory.uuid();
const { status } = await request(ctx.getHttpServer()).put(`/assets/${assetId}`).send({ rating: 0 });
expect(service.update).toHaveBeenCalledWith(undefined, assetId, { rating: null });
expect(status).toBe(200);
});
it('should leave correct ratings as-is', async () => {
const assetId = factory.uuid();
for (const test of [{ rating: -1 }, { rating: 1 }, { rating: 5 }]) {
const { status } = await request(ctx.getHttpServer()).put(`/assets/${assetId}`).send(test);
expect(service.update).toHaveBeenCalledWith(undefined, assetId, test);
expect(status).toBe(200);
}
});
});
describe('GET /assets/statistics', () => {

View File

@@ -10,7 +10,6 @@ import { DatabaseBackupController } from 'src/controllers/database-backup.contro
import { DownloadController } from 'src/controllers/download.controller';
import { DuplicateController } from 'src/controllers/duplicate.controller';
import { FaceController } from 'src/controllers/face.controller';
import { IntegrityController } from 'src/controllers/integrity.controller';
import { JobController } from 'src/controllers/job.controller';
import { LibraryController } from 'src/controllers/library.controller';
import { MaintenanceController } from 'src/controllers/maintenance.controller';
@@ -52,7 +51,6 @@ export const controllers = [
DownloadController,
DuplicateController,
FaceController,
IntegrityController,
JobController,
LibraryController,
MaintenanceController,

View File

@@ -1,91 +0,0 @@
import { Controller, Delete, Get, HttpCode, HttpStatus, Next, Param, Query, Res } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { NextFunction, Response } from 'express';
import { Endpoint, HistoryBuilder } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import {
IntegrityGetReportDto,
IntegrityReportResponseDto,
IntegrityReportSummaryResponseDto,
} from 'src/dtos/integrity.dto';
import { ApiTag, Permission } from 'src/enum';
import { Auth, Authenticated, FileResponse } from 'src/middleware/auth.guard';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { IntegrityService } from 'src/services/integrity.service';
import { sendFile } from 'src/utils/file';
import { IntegrityReportTypeParamDto, UUIDv7ParamDto } from 'src/validation';
@ApiTags(ApiTag.Maintenance)
@Controller('admin/integrity')
export class IntegrityController {
constructor(
private logger: LoggingRepository,
private service: IntegrityService,
) {}
@Get('summary')
@Endpoint({
summary: 'Get integrity report summary',
description: 'Get a count of the items flagged in each integrity report',
history: new HistoryBuilder().added('v2.6.0').alpha('v2.6.0'),
})
@Authenticated({ permission: Permission.Maintenance, admin: true })
getIntegrityReportSummary(): Promise<IntegrityReportSummaryResponseDto> {
return this.service.getIntegrityReportSummary();
}
@Get('report')
@HttpCode(HttpStatus.OK)
@Endpoint({
summary: 'Get integrity report by type',
description: 'Get all flagged items by integrity report type',
history: new HistoryBuilder().added('v2.6.0').alpha('v2.6.0'),
})
@Authenticated({ permission: Permission.Maintenance, admin: true })
getIntegrityReport(@Query() dto: IntegrityGetReportDto): Promise<IntegrityReportResponseDto> {
return this.service.getIntegrityReport(dto);
}
@Get('report/:id/file')
@Endpoint({
summary: 'Download flagged file',
description: 'Download the untracked/broken file if one exists',
history: new HistoryBuilder().added('v2.6.0').alpha('v2.6.0'),
})
@FileResponse()
@Authenticated({ permission: Permission.Maintenance, admin: true })
async getIntegrityReportFile(
@Param() { id }: UUIDv7ParamDto,
@Res() res: Response,
@Next() next: NextFunction,
): Promise<void> {
await sendFile(res, next, () => this.service.getIntegrityReportFile(id), this.logger);
}
@Delete('report/:id')
@Endpoint({
summary: 'Delete integrity report item',
description: 'Delete a given report item and perform corresponding deletion (e.g. trash asset, delete file)',
history: new HistoryBuilder().added('v2.6.0').alpha('v2.6.0'),
})
@Authenticated({ permission: Permission.Maintenance, admin: true })
async deleteIntegrityReport(@Auth() auth: AuthDto, @Param() { id }: UUIDv7ParamDto): Promise<void> {
await this.service.deleteIntegrityReport(auth.user.id, id);
}
@Get('report/:type/csv')
@Endpoint({
summary: 'Export integrity report by type as CSV',
description: 'Get all integrity report entries for a given type as a CSV',
history: new HistoryBuilder().added('v2.6.0').alpha('v2.6.0'),
})
@FileResponse()
@Authenticated({ permission: Permission.Maintenance, admin: true })
getIntegrityReportCsv(@Param() { type }: IntegrityReportTypeParamDto, @Res() res: Response): void {
res.setHeader('Content-Type', 'text/csv');
res.setHeader('Cache-Control', 'private, no-cache, no-transform');
res.setHeader('Content-Disposition', `attachment; filename="${encodeURIComponent(`${Date.now()}-${type}.csv`)}"`);
this.service.getIntegrityReportCsv(type).pipe(res);
}
}

View File

@@ -437,6 +437,13 @@ export const columns = {
'asset_exif.rating',
'asset_exif.fps',
],
syncAssetEdit: [
'asset_edit.id',
'asset_edit.assetId',
'asset_edit.sequence',
'asset_edit.action',
'asset_edit.parameters',
],
exif: [
'asset_exif.assetId',
'asset_exif.autoStackId',

View File

@@ -1,5 +1,5 @@
import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { Transform, Type } from 'class-transformer';
import {
IsArray,
IsDateString,
@@ -16,6 +16,7 @@ import {
ValidateIf,
ValidateNested,
} from 'class-validator';
import { HistoryBuilder, Property } from 'src/decorators';
import { BulkIdsDto } from 'src/dtos/asset-ids.response.dto';
import { AssetType, AssetVisibility } from 'src/enum';
import { AssetStats } from 'src/repositories/asset.repository';
@@ -56,12 +57,19 @@ export class UpdateAssetBase {
@IsNotEmpty()
longitude?: number;
@ApiProperty({ description: 'Rating' })
@Optional()
@Property({
description: 'Rating in range [1-5], or null for unrated',
history: new HistoryBuilder()
.added('v1')
.stable('v2')
.updated('v2.6.0', 'Using -1 as a rating is deprecated and will be removed in the next major version.'),
})
@Optional({ nullable: true })
@IsInt()
@Max(5)
@Min(-1)
rating?: number;
@Transform(({ value }) => (value === 0 ? null : value))
rating?: number | null;
@ApiProperty({ description: 'Asset description' })
@Optional()

View File

@@ -1,7 +1,6 @@
import { ApiProperty, getSchemaPath } from '@nestjs/swagger';
import { ApiExtraModels, ApiProperty, getSchemaPath } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { ArrayMinSize, IsEnum, IsInt, Min, ValidateNested } from 'class-validator';
import { ExtraModel } from 'src/dtos/sync.dto';
import { IsAxisAlignedRotation, IsUniqueEditActions, ValidateEnum, ValidateUUID } from 'src/validation';
export enum AssetEditAction {
@@ -15,7 +14,6 @@ export enum MirrorAxis {
Vertical = 'vertical',
}
@ExtraModel()
export class CropParameters {
@IsInt()
@Min(0)
@@ -38,14 +36,12 @@ export class CropParameters {
height!: number;
}
@ExtraModel()
export class RotateParameters {
@IsAxisAlignedRotation()
@ApiProperty({ description: 'Rotation angle in degrees' })
angle!: number;
}
@ExtraModel()
export class MirrorParameters {
@IsEnum(MirrorAxis)
@ApiProperty({ enum: MirrorAxis, enumName: 'MirrorAxis', description: 'Axis to mirror along' })
@@ -67,6 +63,7 @@ export type AssetEditActionItem =
parameters: MirrorParameters;
};
@ApiExtraModels(CropParameters, RotateParameters, MirrorParameters)
export class AssetEditActionItemDto {
@ValidateEnum({ name: 'AssetEditAction', enum: AssetEditAction, description: 'Type of edit action to perform' })
action!: AssetEditAction;

View File

@@ -1,48 +0,0 @@
import { ApiProperty, ApiPropertyOptional } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { IsInt, IsOptional, IsUUID, Min } from 'class-validator';
import { IntegrityReportType } from 'src/enum';
import { ValidateEnum } from 'src/validation';
export class IntegrityReportSummaryResponseDto {
@ApiProperty({ type: 'integer' })
[IntegrityReportType.ChecksumFail]!: number;
@ApiProperty({ type: 'integer' })
[IntegrityReportType.MissingFile]!: number;
@ApiProperty({ type: 'integer' })
[IntegrityReportType.UntrackedFile]!: number;
}
export class IntegrityGetReportDto {
@ValidateEnum({ enum: IntegrityReportType, name: 'IntegrityReportType' })
type!: IntegrityReportType;
@ApiPropertyOptional({ description: 'Cursor for pagination', default: 1 })
@IsOptional()
@IsUUID()
cursor?: string;
@ApiPropertyOptional({ description: 'Number of items per page', default: 500 })
@IsInt()
@Min(1)
@IsOptional()
@Type(() => Number)
limit?: number;
}
export class IntegrityDeleteReportDto {
@ValidateEnum({ enum: IntegrityReportType, name: 'IntegrityReportType' })
type!: IntegrityReportType;
}
class IntegrityReportDto {
id!: string;
@ValidateEnum({ enum: IntegrityReportType, name: 'IntegrityReportType' })
type!: IntegrityReportType;
path!: string;
}
export class IntegrityReportResponseDto {
items!: IntegrityReportDto[];
nextCursor?: string;
}

View File

@@ -71,9 +71,6 @@ export class QueuesResponseLegacyDto implements Record<QueueName, QueueResponseL
@ApiProperty({ type: QueueResponseLegacyDto })
[QueueName.Workflow]!: QueueResponseLegacyDto;
@ApiProperty({ type: QueueResponseLegacyDto })
[QueueName.IntegrityCheck]!: QueueResponseLegacyDto;
@ApiProperty({ type: QueueResponseLegacyDto })
[QueueName.Editor]!: QueueResponseLegacyDto;
}

View File

@@ -2,7 +2,7 @@ import { ApiProperty, ApiPropertyOptional } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { IsInt, IsNotEmpty, IsString, Max, Min } from 'class-validator';
import { Place } from 'src/database';
import { HistoryBuilder } from 'src/decorators';
import { HistoryBuilder, Property } from 'src/decorators';
import { AlbumResponseDto } from 'src/dtos/album.dto';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AssetOrder, AssetType, AssetVisibility } from 'src/enum';
@@ -103,12 +103,21 @@ class BaseSearchDto {
@ValidateUUID({ each: true, optional: true, description: 'Filter by album IDs' })
albumIds?: string[];
@ApiPropertyOptional({ type: 'number', description: 'Filter by rating', minimum: -1, maximum: 5 })
@Optional()
@Property({
type: 'number',
description: 'Filter by rating [1-5], or null for unrated',
minimum: -1,
maximum: 5,
history: new HistoryBuilder()
.added('v1')
.stable('v2')
.updated('v2.6.0', 'Using -1 as a rating is deprecated and will be removed in the next major version.'),
})
@Optional({ nullable: true })
@IsInt()
@Max(5)
@Min(-1)
rating?: number;
rating?: number | null;
@ApiPropertyOptional({ description: 'Filter by OCR text content' })
@IsString()

View File

@@ -2,6 +2,7 @@
import { ApiProperty } from '@nestjs/swagger';
import { ArrayMaxSize, IsInt, IsPositive, IsString } from 'class-validator';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AssetEditAction } from 'src/dtos/editing.dto';
import {
AlbumUserRole,
AssetOrder,
@@ -218,6 +219,24 @@ export class SyncAssetExifV1 {
fps!: number | null;
}
@ExtraModel()
export class SyncAssetEditV1 {
id!: string;
assetId!: string;
@ValidateEnum({ enum: AssetEditAction, name: 'AssetEditAction' })
action!: AssetEditAction;
parameters!: object;
@ApiProperty({ type: 'integer' })
sequence!: number;
}
@ExtraModel()
export class SyncAssetEditDeleteV1 {
editId!: string;
}
@ExtraModel()
export class SyncAssetMetadataV1 {
@ApiProperty({ description: 'Asset ID' })
@@ -480,6 +499,8 @@ export type SyncItem = {
[SyncEntityType.AssetMetadataV1]: SyncAssetMetadataV1;
[SyncEntityType.AssetMetadataDeleteV1]: SyncAssetMetadataDeleteV1;
[SyncEntityType.AssetExifV1]: SyncAssetExifV1;
[SyncEntityType.AssetEditV1]: SyncAssetEditV1;
[SyncEntityType.AssetEditDeleteV1]: SyncAssetEditDeleteV1;
[SyncEntityType.PartnerAssetV1]: SyncAssetV1;
[SyncEntityType.PartnerAssetBackfillV1]: SyncAssetV1;
[SyncEntityType.PartnerAssetDeleteV1]: SyncAssetDeleteV1;

View File

@@ -38,7 +38,6 @@ const isOAuthEnabled = (config: SystemConfigOAuthDto) => config.enabled;
const isOAuthOverrideEnabled = (config: SystemConfigOAuthDto) => config.mobileOverrideEnabled;
const isEmailNotificationEnabled = (config: SystemConfigSmtpDto) => config.enabled;
const isDatabaseBackupEnabled = (config: DatabaseBackupConfig) => config.enabled;
const isEnabledProperty = (config: { enabled: boolean }) => config.enabled;
export class DatabaseBackupConfig {
@ValidateBoolean({ description: 'Enabled' })
@@ -156,42 +155,6 @@ export class SystemConfigFFmpegDto {
tonemap!: ToneMapping;
}
class SystemConfigIntegrityJob {
@ValidateBoolean()
enabled!: boolean;
@ValidateIf(isEnabledProperty)
@IsNotEmpty()
@IsCronExpression()
@IsString()
cronExpression!: string;
}
class SystemConfigIntegrityChecksumJob extends SystemConfigIntegrityJob {
@IsInt()
timeLimit!: number;
@IsNumber()
percentageLimit!: number;
}
class SystemConfigIntegrityChecks {
@Type(() => SystemConfigIntegrityJob)
@ValidateNested()
@IsObject()
missingFiles!: SystemConfigIntegrityJob;
@Type(() => SystemConfigIntegrityJob)
@ValidateNested()
@IsObject()
untrackedFiles!: SystemConfigIntegrityJob;
@Type(() => SystemConfigIntegrityChecksumJob)
@ValidateNested()
@IsObject()
checksumFiles!: SystemConfigIntegrityChecksumJob;
}
class JobSettingsDto {
@IsInt()
@IsPositive()
@@ -282,12 +245,6 @@ class SystemConfigJobDto implements Record<ConcurrentQueueName, JobSettingsDto>
@ValidateNested()
@IsObject()
@Type(() => JobSettingsDto)
[QueueName.IntegrityCheck]!: JobSettingsDto;
@ApiProperty({ type: JobSettingsDto })
@ValidateNested()
@IsObject()
@Type(() => JobSettingsDto)
[QueueName.Editor]!: JobSettingsDto;
}
@@ -758,13 +715,6 @@ export class SystemConfigDto implements SystemConfig {
@IsObject()
ffmpeg!: SystemConfigFFmpegDto;
// Description lives on schema to avoid duplication
@ApiProperty({ description: undefined })
@Type(() => SystemConfigIntegrityChecks)
@ValidateNested()
@IsObject()
integrityChecks!: SystemConfigIntegrityChecks;
// Description lives on schema to avoid duplication
@ApiProperty({ description: undefined })
@Type(() => SystemConfigLoggingDto)

View File

@@ -317,7 +317,6 @@ export enum SystemMetadataKey {
SystemFlags = 'system-flags',
VersionCheckState = 'version-check-state',
License = 'license',
IntegrityChecksumCheckpoint = 'integrity-checksum-checkpoint',
}
export enum UserMetadataKey {
@@ -361,12 +360,6 @@ export enum SourceType {
Manual = 'manual',
}
export enum IntegrityReportType {
UntrackedFile = 'untracked_file',
MissingFile = 'missing_file',
ChecksumFail = 'checksum_mismatch',
}
export enum ManualJobName {
PersonCleanup = 'person-cleanup',
TagCleanup = 'tag-cleanup',
@@ -374,15 +367,6 @@ export enum ManualJobName {
MemoryCleanup = 'memory-cleanup',
MemoryCreate = 'memory-create',
BackupDatabase = 'backup-database',
IntegrityMissingFiles = `integrity-missing-files`,
IntegrityUntrackedFiles = `integrity-untracked-files`,
IntegrityChecksumFiles = `integrity-checksum-mismatch`,
IntegrityMissingFilesRefresh = `integrity-missing-files-refresh`,
IntegrityUntrackedFilesRefresh = `integrity-untracked-files-refresh`,
IntegrityChecksumFilesRefresh = `integrity-checksum-mismatch-refresh`,
IntegrityMissingFilesDeleteAll = `integrity-missing-files-delete-all`,
IntegrityUntrackedFilesDeleteAll = `integrity-untracked-files-delete-all`,
IntegrityChecksumFilesDeleteAll = `integrity-checksum-mismatch-delete-all`,
}
export enum AssetPathType {
@@ -582,7 +566,6 @@ export enum QueueName {
BackupDatabase = 'backupDatabase',
Ocr = 'ocr',
Workflow = 'workflow',
IntegrityCheck = 'integrityCheck',
Editor = 'editor',
}
@@ -673,18 +656,6 @@ export enum JobName {
// Workflow
WorkflowRun = 'WorkflowRun',
// Integrity
IntegrityUntrackedFilesQueueAll = 'IntegrityUntrackedFilesQueueAll',
IntegrityUntrackedFiles = 'IntegrityUntrackedFiles',
IntegrityUntrackedFilesRefresh = 'IntegrityUntrackedRefresh',
IntegrityMissingFilesQueueAll = 'IntegrityMissingFilesQueueAll',
IntegrityMissingFiles = 'IntegrityMissingFiles',
IntegrityMissingFilesRefresh = 'IntegrityMissingFilesRefresh',
IntegrityChecksumFiles = 'IntegrityChecksumFiles',
IntegrityChecksumFilesRefresh = 'IntegrityChecksumFilesRefresh',
IntegrityDeleteReportType = 'IntegrityDeleteReportType',
IntegrityDeleteReports = 'IntegrityDeleteReports',
}
export enum QueueCommand {
@@ -728,7 +699,6 @@ export enum DatabaseLock {
BackupDatabase = 42,
MaintenanceOperation = 621,
MemoryCreation = 777,
IntegrityCheck = 67,
}
export enum MaintenanceAction {
@@ -750,6 +720,7 @@ export enum SyncRequestType {
AlbumAssetExifsV1 = 'AlbumAssetExifsV1',
AssetsV1 = 'AssetsV1',
AssetExifsV1 = 'AssetExifsV1',
AssetEditsV1 = 'AssetEditsV1',
AssetMetadataV1 = 'AssetMetadataV1',
AuthUsersV1 = 'AuthUsersV1',
MemoriesV1 = 'MemoriesV1',
@@ -775,6 +746,8 @@ export enum SyncEntityType {
AssetV1 = 'AssetV1',
AssetDeleteV1 = 'AssetDeleteV1',
AssetExifV1 = 'AssetExifV1',
AssetEditV1 = 'AssetEditV1',
AssetEditDeleteV1 = 'AssetEditDeleteV1',
AssetMetadataV1 = 'AssetMetadataV1',
AssetMetadataDeleteV1 = 'AssetMetadataDeleteV1',
@@ -881,7 +854,6 @@ export enum ApiTag {
Download = 'Download',
Duplicates = 'Duplicates',
Faces = 'Faces',
Integrity = 'Integrity (admin)',
Jobs = 'Jobs',
Libraries = 'Libraries',
Maintenance = 'Maintenance (admin)',

View File

@@ -18,3 +18,17 @@ where
"assetId" = $1
order by
"sequence" asc
-- AssetEditRepository.getWithSyncInfo
select
"asset_edit"."id",
"asset_edit"."assetId",
"asset_edit"."sequence",
"asset_edit"."action",
"asset_edit"."parameters"
from
"asset_edit"
where
"assetId" = $1
order by
"sequence" asc

View File

@@ -1,178 +0,0 @@
-- NOTE: This file is auto generated by ./sql-generator
-- IntegrityRepository.getById
select
"integrity_report".*
from
"integrity_report"
where
"id" = $1
-- IntegrityRepository.getIntegrityReportSummary
select
"type",
count(*) as "count"
from
"integrity_report"
group by
"type"
-- IntegrityRepository.getIntegrityReport
select
"id",
"type",
"path",
"assetId",
"fileAssetId",
"createdAt"
from
"integrity_report"
where
"type" = $1
and "id" <= $2
order by
"id" desc
limit
$3
-- IntegrityRepository.getAssetPathsByPaths
select
"originalPath",
"encodedVideoPath"
from
"asset"
where
(
"originalPath" in $1
or "encodedVideoPath" in $2
)
-- IntegrityRepository.getAssetFilePathsByPaths
select
"path"
from
"asset_file"
where
"path" in $1
-- IntegrityRepository.getAssetCount
select
count(*) as "count"
from
"asset"
-- IntegrityRepository.streamAllAssetPaths
select
"originalPath",
"encodedVideoPath"
from
"asset"
-- IntegrityRepository.streamAllAssetFilePaths
select
"path"
from
"asset_file"
-- IntegrityRepository.streamAssetPaths
select
"allPaths"."path" as "path",
"allPaths"."assetId",
"allPaths"."fileAssetId",
"integrity_report"."id" as "reportId"
from
(
select
"asset"."originalPath" as "path",
"asset"."id" as "assetId",
null::uuid as "fileAssetId"
from
"asset"
where
"asset"."deletedAt" is null
union all
select
"asset"."encodedVideoPath" as "path",
"asset"."id" as "assetId",
null::uuid as "fileAssetId"
from
"asset"
where
"asset"."deletedAt" is null
and "asset"."encodedVideoPath" is not null
and "asset"."encodedVideoPath" != ''
union all
select
"path",
null::uuid as "assetId",
"asset_file"."id" as "fileAssetId"
from
"asset_file"
) as "allPaths"
left join "integrity_report" on "integrity_report"."type" = $1
and (
"integrity_report"."assetId" = "allPaths"."assetId"
or "integrity_report"."fileAssetId" = "allPaths"."fileAssetId"
)
-- IntegrityRepository.streamAssetChecksums
select
"asset"."originalPath",
"asset"."checksum",
"asset"."createdAt",
"asset"."id" as "assetId",
"integrity_report"."id" as "reportId"
from
"asset"
left join "integrity_report" on "integrity_report"."assetId" = "asset"."id"
and "integrity_report"."type" = $1
where
"asset"."deletedAt" is null
and "createdAt" >= $2
and "createdAt" <= $3
order by
"createdAt" asc
-- IntegrityRepository.streamIntegrityReports
select
"id",
"type",
"path",
"assetId",
"fileAssetId"
from
"integrity_report"
where
"type" = $1
order by
"createdAt" desc
-- IntegrityRepository.streamIntegrityReportsWithAssetChecksum
select
"integrity_report"."id" as "reportId",
"integrity_report"."path"
from
"integrity_report"
where
"integrity_report"."type" = $1
-- IntegrityRepository.streamIntegrityReportsByProperty
select
"id",
"path",
"assetId",
"fileAssetId"
from
"integrity_report"
where
"abcdefghi" is not null
-- IntegrityRepository.deleteById
delete from "integrity_report"
where
"id" = $1
-- IntegrityRepository.deleteByIds
delete from "integrity_report"
where
"id" in $1

View File

@@ -514,6 +514,38 @@ where
order by
"asset_exif"."updateId" asc
-- SyncRepository.assetEdit.getDeletes
select
"asset_edit_audit"."id",
"editId"
from
"asset_edit_audit" as "asset_edit_audit"
inner join "asset" on "asset"."id" = "asset_edit_audit"."assetId"
where
"asset_edit_audit"."id" < $1
and "asset_edit_audit"."id" > $2
and "asset"."ownerId" = $3
order by
"asset_edit_audit"."id" asc
-- SyncRepository.assetEdit.getUpserts
select
"asset_edit"."id",
"asset_edit"."assetId",
"asset_edit"."sequence",
"asset_edit"."action",
"asset_edit"."parameters",
"asset_edit"."updateId"
from
"asset_edit" as "asset_edit"
inner join "asset" on "asset"."id" = "asset_edit"."assetId"
where
"asset_edit"."updateId" < $1
and "asset_edit"."updateId" > $2
and "asset"."ownerId" = $3
order by
"asset_edit"."updateId" asc
-- SyncRepository.assetFace.getDeletes
select
"asset_face_audit"."id",

View File

@@ -1,6 +1,7 @@
import { Injectable } from '@nestjs/common';
import { Kysely } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetEditActionItem, AssetEditActionItemResponseDto } from 'src/dtos/editing.dto';
import { DB } from 'src/schema';
@@ -9,9 +10,7 @@ import { DB } from 'src/schema';
export class AssetEditRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({
params: [DummyValue.UUID],
})
@GenerateSql({ params: [DummyValue.UUID] })
replaceAll(assetId: string, edits: AssetEditActionItem[]): Promise<AssetEditActionItemResponseDto[]> {
return this.db.transaction().execute(async (trx) => {
await trx.deleteFrom('asset_edit').where('assetId', '=', assetId).execute();
@@ -28,9 +27,7 @@ export class AssetEditRepository {
});
}
@GenerateSql({
params: [DummyValue.UUID],
})
@GenerateSql({ params: [DummyValue.UUID] })
getAll(assetId: string): Promise<AssetEditActionItemResponseDto[]> {
return this.db
.selectFrom('asset_edit')
@@ -39,4 +36,14 @@ export class AssetEditRepository {
.orderBy('sequence', 'asc')
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
getWithSyncInfo(assetId: string) {
return this.db
.selectFrom('asset_edit')
.select(columns.syncAssetEdit)
.where('assetId', '=', assetId)
.orderBy('sequence', 'asc')
.execute();
}
}

View File

@@ -16,7 +16,6 @@ import { DownloadRepository } from 'src/repositories/download.repository';
import { DuplicateRepository } from 'src/repositories/duplicate.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { IntegrityRepository } from 'src/repositories/integrity.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LibraryRepository } from 'src/repositories/library.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
@@ -71,7 +70,6 @@ export const repositories = [
DuplicateRepository,
EmailRepository,
EventRepository,
IntegrityRepository,
JobRepository,
LibraryRepository,
LoggingRepository,

View File

@@ -1,222 +0,0 @@
import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DummyValue, GenerateSql } from 'src/decorators';
import { IntegrityReportType } from 'src/enum';
import { DB } from 'src/schema';
import { IntegrityReportTable } from 'src/schema/tables/integrity-report.table';
export type ReportPaginationOptions = {
cursor?: string;
limit: number;
};
@Injectable()
export class IntegrityRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
create(dto: Insertable<IntegrityReportTable> | Insertable<IntegrityReportTable>[]) {
return this.db
.insertInto('integrity_report')
.values(dto)
.onConflict((oc) =>
oc.columns(['path', 'type']).doUpdateSet({
assetId: (eb) => eb.ref('excluded.assetId'),
fileAssetId: (eb) => eb.ref('excluded.fileAssetId'),
}),
)
.returningAll()
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [DummyValue.STRING] })
getById(id: string) {
return this.db
.selectFrom('integrity_report')
.selectAll('integrity_report')
.where('id', '=', id)
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [] })
async getIntegrityReportSummary() {
const counts = await this.db
.selectFrom('integrity_report')
.select(['type', this.db.fn.countAll<number>().as('count')])
.groupBy('type')
.execute();
return Object.fromEntries(
Object.values(IntegrityReportType).map((type) => [type, counts.find((count) => count.type === type)?.count || 0]),
) as Record<IntegrityReportType, number>;
}
@GenerateSql({ params: [{ cursor: DummyValue.NUMBER, limit: 100 }, DummyValue.STRING] })
async getIntegrityReport(pagination: ReportPaginationOptions, type: IntegrityReportType) {
const items = await this.db
.selectFrom('integrity_report')
.select(['id', 'type', 'path', 'assetId', 'fileAssetId', 'createdAt'])
.where('type', '=', type)
.$if(pagination.cursor !== undefined, (eb) => eb.where('id', '<=', pagination.cursor!))
.orderBy('id', 'desc')
.limit(pagination.limit + 1)
.execute();
return {
items: items.slice(0, pagination.limit),
nextCursor: items[pagination.limit]?.id as string | undefined,
};
}
@GenerateSql({ params: [DummyValue.STRING] })
getAssetPathsByPaths(paths: string[]) {
return this.db
.selectFrom('asset')
.select(['originalPath', 'encodedVideoPath'])
.where((eb) => eb.or([eb('originalPath', 'in', paths), eb('encodedVideoPath', 'in', paths)]))
.execute();
}
@GenerateSql({ params: [DummyValue.STRING] })
getAssetFilePathsByPaths(paths: string[]) {
return this.db.selectFrom('asset_file').select(['path']).where('path', 'in', paths).execute();
}
@GenerateSql({ params: [] })
getAssetCount() {
return this.db
.selectFrom('asset')
.select((eb) => eb.fn.countAll<number>().as('count'))
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [], stream: true })
streamAllAssetPaths() {
return this.db.selectFrom('asset').select(['originalPath', 'encodedVideoPath']).stream();
}
@GenerateSql({ params: [], stream: true })
streamAllAssetFilePaths() {
return this.db.selectFrom('asset_file').select(['path']).stream();
}
@GenerateSql({ params: [], stream: true })
streamAssetPaths() {
return this.db
.selectFrom((eb) =>
eb
.selectFrom('asset')
.where('asset.deletedAt', 'is', null)
.select(['asset.originalPath as path'])
.select((eb) => [
eb.ref('asset.id').$castTo<string | null>().as('assetId'),
sql<string | null>`null::uuid`.as('fileAssetId'),
])
.unionAll(
eb
.selectFrom('asset')
.where('asset.deletedAt', 'is', null)
.select((eb) => [
eb.ref('asset.encodedVideoPath').$castTo<string>().as('path'),
eb.ref('asset.id').$castTo<string | null>().as('assetId'),
sql<string | null>`null::uuid`.as('fileAssetId'),
])
.where('asset.encodedVideoPath', 'is not', null)
.where('asset.encodedVideoPath', '!=', sql<string>`''`),
)
.unionAll(
eb
.selectFrom('asset_file')
.select(['path'])
.select((eb) => [
sql<string | null>`null::uuid`.as('assetId'),
eb.ref('asset_file.id').$castTo<string | null>().as('fileAssetId'),
]),
)
.as('allPaths'),
)
.leftJoin('integrity_report', (join) =>
join
.on('integrity_report.type', '=', IntegrityReportType.UntrackedFile)
.on((eb) =>
eb.or([
eb('integrity_report.assetId', '=', eb.ref('allPaths.assetId')),
eb('integrity_report.fileAssetId', '=', eb.ref('allPaths.fileAssetId')),
]),
),
)
.select(['allPaths.path as path', 'allPaths.assetId', 'allPaths.fileAssetId', 'integrity_report.id as reportId'])
.stream() as AsyncIterableIterator<
{ path: string; reportId: string | null } & (
| { assetId: string; fileAssetId: null }
| { assetId: null; fileAssetId: string }
)
>;
}
@GenerateSql({ params: [DummyValue.DATE, DummyValue.DATE], stream: true })
streamAssetChecksums(startMarker?: Date, endMarker?: Date) {
return this.db
.selectFrom('asset')
.where('asset.deletedAt', 'is', null)
.leftJoin('integrity_report', (join) =>
join
.onRef('integrity_report.assetId', '=', 'asset.id')
.on('integrity_report.type', '=', IntegrityReportType.ChecksumFail),
)
.select([
'asset.originalPath',
'asset.checksum',
'asset.createdAt',
'asset.id as assetId',
'integrity_report.id as reportId',
])
.$if(startMarker !== undefined, (qb) => qb.where('createdAt', '>=', startMarker!))
.$if(endMarker !== undefined, (qb) => qb.where('createdAt', '<=', endMarker!))
.orderBy('createdAt', 'asc')
.stream();
}
@GenerateSql({ params: [DummyValue.STRING], stream: true })
streamIntegrityReports(type: IntegrityReportType) {
return this.db
.selectFrom('integrity_report')
.select(['id', 'type', 'path', 'assetId', 'fileAssetId'])
.where('type', '=', type)
.orderBy('createdAt', 'desc')
.stream();
}
@GenerateSql({ params: [DummyValue.STRING], stream: true })
streamIntegrityReportsWithAssetChecksum(type: IntegrityReportType) {
return this.db
.selectFrom('integrity_report')
.select(['integrity_report.id as reportId', 'integrity_report.path'])
.where('integrity_report.type', '=', type)
.$if(type === IntegrityReportType.ChecksumFail, (eb) =>
eb.leftJoin('asset', 'integrity_report.path', 'asset.originalPath').select('asset.checksum'),
)
.stream();
}
@GenerateSql({ params: [DummyValue.STRING], stream: true })
streamIntegrityReportsByProperty(property?: 'assetId' | 'fileAssetId', filterType?: IntegrityReportType) {
return this.db
.selectFrom('integrity_report')
.select(['id', 'path', 'assetId', 'fileAssetId'])
.$if(filterType !== undefined, (eb) => eb.where('type', '=', filterType!))
.$if(property === undefined, (eb) => eb.where('assetId', 'is', null).where('fileAssetId', 'is', null))
.$if(property !== undefined, (eb) => eb.where(property!, 'is not', null))
.stream();
}
@GenerateSql({ params: [DummyValue.STRING] })
deleteById(id: string) {
return this.db.deleteFrom('integrity_report').where('id', '=', id).execute();
}
@GenerateSql({ params: [DummyValue.STRING] })
deleteByIds(ids: string[]) {
return this.db.deleteFrom('integrity_report').where('id', 'in', ids).execute();
}
}

View File

@@ -107,7 +107,7 @@ export class MediaRepository {
ExposureTime: tags.exposureTime,
ProfileDescription: tags.profileDescription,
ColorSpace: tags.colorspace,
Rating: tags.rating,
Rating: tags.rating === null ? 0 : tags.rating,
// specially convert Orientation to numeric Orientation# for exiftool
'Orientation#': tags.orientation ? Number(tags.orientation) : undefined,
};
@@ -243,23 +243,26 @@ export class MediaRepository {
bitrate: this.parseInt(results.format.bit_rate),
},
videoStreams: results.streams
.filter((stream) => stream.codec_type === 'video')
.filter((stream) => !stream.disposition?.attached_pic)
.map((stream) => ({
index: stream.index,
height: this.parseInt(stream.height),
width: this.parseInt(stream.width),
codecName: stream.codec_name === 'h265' ? 'hevc' : stream.codec_name,
codecType: stream.codec_type,
frameCount: this.parseInt(options?.countFrames ? stream.nb_read_packets : stream.nb_frames),
rotation: this.parseInt(stream.rotation),
isHDR: stream.color_transfer === 'smpte2084' || stream.color_transfer === 'arib-std-b67',
bitrate: this.parseInt(stream.bit_rate),
pixelFormat: stream.pix_fmt || 'yuv420p',
colorPrimaries: stream.color_primaries,
colorSpace: stream.color_space,
colorTransfer: stream.color_transfer,
})),
.filter((stream) => stream.codec_type === 'video' && !stream.disposition?.attached_pic)
.map((stream) => {
const height = this.parseInt(stream.height);
const dar = this.getDar(stream.display_aspect_ratio);
return {
index: stream.index,
height,
width: dar ? Math.round(height * dar) : this.parseInt(stream.width),
codecName: stream.codec_name === 'h265' ? 'hevc' : stream.codec_name,
codecType: stream.codec_type,
frameCount: this.parseInt(options?.countFrames ? stream.nb_read_packets : stream.nb_frames),
rotation: this.parseInt(stream.rotation),
isHDR: stream.color_transfer === 'smpte2084' || stream.color_transfer === 'arib-std-b67',
bitrate: this.parseInt(stream.bit_rate),
pixelFormat: stream.pix_fmt || 'yuv420p',
colorPrimaries: stream.color_primaries,
colorSpace: stream.color_space,
colorTransfer: stream.color_transfer,
};
}),
audioStreams: results.streams
.filter((stream) => stream.codec_type === 'audio')
.map((stream) => ({
@@ -352,4 +355,15 @@ export class MediaRepository {
private parseFloat(value: string | number | undefined): number {
return Number.parseFloat(value as string) || 0;
}
private getDar(dar: string | undefined): number {
if (dar) {
const [darW, darH] = dar.split(':').map(Number);
if (darW && darH) {
return darW / darH;
}
}
return 0;
}
}

View File

@@ -53,6 +53,7 @@ export class SyncRepository {
albumUser: AlbumUserSync;
asset: AssetSync;
assetExif: AssetExifSync;
assetEdit: AssetEditSync;
assetFace: AssetFaceSync;
assetMetadata: AssetMetadataSync;
authUser: AuthUserSync;
@@ -75,6 +76,7 @@ export class SyncRepository {
this.albumUser = new AlbumUserSync(this.db);
this.asset = new AssetSync(this.db);
this.assetExif = new AssetExifSync(this.db);
this.assetEdit = new AssetEditSync(this.db);
this.assetFace = new AssetFaceSync(this.db);
this.assetMetadata = new AssetMetadataSync(this.db);
this.authUser = new AuthUserSync(this.db);
@@ -91,7 +93,7 @@ export class SyncRepository {
}
}
class BaseSync {
export class BaseSync {
constructor(protected db: Kysely<DB>) {}
protected backfillQuery<T extends keyof DB>(t: T, { nowId, beforeUpdateId, afterUpdateId }: SyncBackfillOptions) {
@@ -501,6 +503,30 @@ class AssetExifSync extends BaseSync {
}
}
class AssetEditSync extends BaseSync {
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getDeletes(options: SyncQueryOptions) {
return this.auditQuery('asset_edit_audit', options)
.select(['asset_edit_audit.id', 'editId'])
.innerJoin('asset', 'asset.id', 'asset_edit_audit.assetId')
.where('asset.ownerId', '=', options.userId)
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('asset_edit_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('asset_edit', options)
.select([...columns.syncAssetEdit, 'asset_edit.updateId'])
.innerJoin('asset', 'asset.id', 'asset_edit.assetId')
.where('asset.ownerId', '=', options.userId)
.stream();
}
}
class MemorySync extends BaseSync {
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getDeletes(options: SyncQueryOptions) {

View File

@@ -11,7 +11,7 @@ import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { NotificationDto } from 'src/dtos/notification.dto';
import { ReleaseNotification, ServerVersionResponseDto } from 'src/dtos/server.dto';
import { SyncAssetExifV1, SyncAssetV1 } from 'src/dtos/sync.dto';
import { SyncAssetEditV1, SyncAssetExifV1, SyncAssetV1 } from 'src/dtos/sync.dto';
import { AppRestartEvent, ArgsOf, EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { handlePromiseError } from 'src/utils/misc';
@@ -37,7 +37,7 @@ export interface ClientEventMap {
AssetUploadReadyV1: [{ asset: SyncAssetV1; exif: SyncAssetExifV1 }];
AppRestartV1: [AppRestartEvent];
AssetEditReadyV1: [{ asset: SyncAssetV1 }];
AssetEditReadyV1: [{ asset: SyncAssetV1; edit: SyncAssetEditV1[] }];
}
export type AuthFn = (client: Socket) => Promise<AuthDto>;

View File

@@ -286,3 +286,16 @@ export const asset_edit_delete = registerFunction({
END
`,
});
export const asset_edit_audit = registerFunction({
name: 'asset_edit_audit',
returnType: 'TRIGGER',
language: 'PLPGSQL',
body: `
BEGIN
INSERT INTO asset_edit_audit ("editId", "assetId")
SELECT "id", "assetId"
FROM OLD;
RETURN NULL;
END`,
});

View File

@@ -29,6 +29,7 @@ import { AlbumUserTable } from 'src/schema/tables/album-user.table';
import { AlbumTable } from 'src/schema/tables/album.table';
import { ApiKeyTable } from 'src/schema/tables/api-key.table';
import { AssetAuditTable } from 'src/schema/tables/asset-audit.table';
import { AssetEditAuditTable } from 'src/schema/tables/asset-edit-audit.table';
import { AssetEditTable } from 'src/schema/tables/asset-edit.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFaceAuditTable } from 'src/schema/tables/asset-face-audit.table';
@@ -42,7 +43,6 @@ import { AssetTable } from 'src/schema/tables/asset.table';
import { AuditTable } from 'src/schema/tables/audit.table';
import { FaceSearchTable } from 'src/schema/tables/face-search.table';
import { GeodataPlacesTable } from 'src/schema/tables/geodata-places.table';
import { IntegrityReportTable } from 'src/schema/tables/integrity-report.table';
import { LibraryTable } from 'src/schema/tables/library.table';
import { MemoryAssetAuditTable } from 'src/schema/tables/memory-asset-audit.table';
import { MemoryAssetTable } from 'src/schema/tables/memory-asset.table';
@@ -89,6 +89,7 @@ export class ImmichDatabase {
ApiKeyTable,
AssetAuditTable,
AssetEditTable,
AssetEditAuditTable,
AssetFaceTable,
AssetFaceAuditTable,
AssetMetadataTable,
@@ -101,7 +102,6 @@ export class ImmichDatabase {
AssetExifTable,
FaceSearchTable,
GeodataPlacesTable,
IntegrityReportTable,
LibraryTable,
MemoryTable,
MemoryAuditTable,
@@ -186,6 +186,7 @@ export interface DB {
asset: AssetTable;
asset_audit: AssetAuditTable;
asset_edit: AssetEditTable;
asset_edit_audit: AssetEditAuditTable;
asset_exif: AssetExifTable;
asset_face: AssetFaceTable;
asset_face_audit: AssetFaceAuditTable;
@@ -202,8 +203,6 @@ export interface DB {
geodata_places: GeodataPlacesTable;
integrity_report: IntegrityReportTable;
library: LibraryTable;
memory: MemoryTable;

View File

@@ -1,24 +0,0 @@
import { Kysely, sql } from 'kysely';
/**
 * Creates the `integrity_report` table: one row per detected integrity problem
 * (unique per type+path), optionally linked to an asset or asset_file with
 * ON DELETE CASCADE so reports vanish with their subject. Also adds an index
 * on asset."createdAt" (used by the checksum scan's date-range batching).
 *
 * NOTE: whitespace inside the sql`` template literals is part of the executed
 * statement text — keep it byte-identical.
 */
export async function up(db: Kysely<any>): Promise<void> {
await sql`CREATE TABLE "integrity_report" (
"id" uuid NOT NULL DEFAULT immich_uuid_v7(),
"type" character varying NOT NULL,
"path" character varying NOT NULL,
"createdAt" timestamp with time zone NOT NULL DEFAULT now(),
"assetId" uuid,
"fileAssetId" uuid,
CONSTRAINT "integrity_report_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE CASCADE ON DELETE CASCADE,
CONSTRAINT "integrity_report_fileAssetId_fkey" FOREIGN KEY ("fileAssetId") REFERENCES "asset_file" ("id") ON UPDATE CASCADE ON DELETE CASCADE,
CONSTRAINT "integrity_report_type_path_uq" UNIQUE ("type", "path"),
CONSTRAINT "integrity_report_pkey" PRIMARY KEY ("id")
);`.execute(db);
await sql`CREATE INDEX "integrity_report_assetId_idx" ON "integrity_report" ("assetId");`.execute(db);
await sql`CREATE INDEX "integrity_report_fileAssetId_idx" ON "integrity_report" ("fileAssetId");`.execute(db);
await sql`CREATE INDEX "asset_createdAt_idx" ON "asset" ("createdAt");`.execute(db);
}
/** Reverts the migration: drops the report table and the asset createdAt index (FK indexes go with the table). */
export async function down(db: Kysely<any>): Promise<void> {
await sql`DROP TABLE "integrity_report";`.execute(db);
await sql`DROP INDEX "asset_createdAt_idx";`.execute(db);
}

View File

@@ -0,0 +1,9 @@
import { Kysely, sql } from 'kysely';
/**
 * Data migration: normalize asset_exif ratings of 0 to NULL, so "no rating"
 * is represented consistently as NULL instead of a zero value.
 */
export async function up(db: Kysely<any>): Promise<void> {
await sql`UPDATE "asset_exif" SET "rating" = NULL WHERE "rating" = 0;`.execute(db);
}
/** Irreversible: rows that were 0 cannot be told apart from rows that were already NULL. */
export async function down(): Promise<void> {
// not supported
}

View File

@@ -0,0 +1,53 @@
import { Kysely, sql } from 'kysely';
/**
 * Adds audit + sync support for asset edits:
 *  - `asset_edit_audit()` trigger function records deleted edits,
 *  - `asset_edit_audit` table stores those records (indexed by assetId and deletedAt),
 *  - `asset_edit` gains `updatedAt`/`updateId` (with index) for change tracking,
 *  - statement-level AFTER DELETE trigger (guarded by pg_trigger_depth() = 0 to
 *    avoid re-entrant firing) and a row-level `updated_at()` BEFORE UPDATE trigger,
 *  - `migration_overrides` rows mirror the function/trigger DDL for the schema tooling.
 *
 * NOTE: whitespace and escaping inside the sql`` literals (including the JSON
 * payloads with embedded \\n) are significant — keep them byte-identical.
 */
export async function up(db: Kysely<any>): Promise<void> {
await sql`CREATE OR REPLACE FUNCTION asset_edit_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO asset_edit_audit ("editId", "assetId")
SELECT "id", "assetId"
FROM OLD;
RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE TABLE "asset_edit_audit" (
"id" uuid NOT NULL DEFAULT immich_uuid_v7(),
"editId" uuid NOT NULL,
"assetId" uuid NOT NULL,
"deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(),
CONSTRAINT "asset_edit_audit_pkey" PRIMARY KEY ("id")
);`.execute(db);
await sql`CREATE INDEX "asset_edit_audit_assetId_idx" ON "asset_edit_audit" ("assetId");`.execute(db);
await sql`CREATE INDEX "asset_edit_audit_deletedAt_idx" ON "asset_edit_audit" ("deletedAt");`.execute(db);
await sql`ALTER TABLE "asset_edit" ADD "updatedAt" timestamp with time zone NOT NULL DEFAULT now();`.execute(db);
await sql`ALTER TABLE "asset_edit" ADD "updateId" uuid NOT NULL DEFAULT immich_uuid_v7();`.execute(db);
await sql`CREATE INDEX "asset_edit_updateId_idx" ON "asset_edit" ("updateId");`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "asset_edit_audit"
AFTER DELETE ON "asset_edit"
REFERENCING OLD TABLE AS "old"
FOR EACH STATEMENT
WHEN (pg_trigger_depth() = 0)
EXECUTE FUNCTION asset_edit_audit();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "asset_edit_updatedAt"
BEFORE UPDATE ON "asset_edit"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_asset_edit_audit', '{"type":"function","name":"asset_edit_audit","sql":"CREATE OR REPLACE FUNCTION asset_edit_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n  BEGIN\\n  INSERT INTO asset_edit_audit (\\"editId\\", \\"assetId\\")\\n  SELECT \\"id\\", \\"assetId\\"\\n  FROM OLD;\\n  RETURN NULL;\\n  END\\n  $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_edit_audit', '{"type":"trigger","name":"asset_edit_audit","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_audit\\"\\n  AFTER DELETE ON \\"asset_edit\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() = 0)\\n  EXECUTE FUNCTION asset_edit_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_edit_updatedAt', '{"type":"trigger","name":"asset_edit_updatedAt","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_updatedAt\\"\\n  BEFORE UPDATE ON \\"asset_edit\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
}
/**
 * Reverts the asset-edit audit migration: drop triggers before the columns/
 * table/function they depend on, then remove the migration_overrides rows.
 */
export async function down(db: Kysely<any>): Promise<void> {
await sql`DROP TRIGGER "asset_edit_audit" ON "asset_edit";`.execute(db);
await sql`DROP TRIGGER "asset_edit_updatedAt" ON "asset_edit";`.execute(db);
await sql`DROP INDEX "asset_edit_updateId_idx";`.execute(db);
await sql`ALTER TABLE "asset_edit" DROP COLUMN "updatedAt";`.execute(db);
await sql`ALTER TABLE "asset_edit" DROP COLUMN "updateId";`.execute(db);
await sql`DROP TABLE "asset_edit_audit";`.execute(db);
await sql`DROP FUNCTION asset_edit_audit;`.execute(db);
await sql`DELETE FROM "migration_overrides" WHERE "name" = 'function_asset_edit_audit';`.execute(db);
await sql`DELETE FROM "migration_overrides" WHERE "name" = 'trigger_asset_edit_audit';`.execute(db);
await sql`DELETE FROM "migration_overrides" WHERE "name" = 'trigger_asset_edit_updatedAt';`.execute(db);
}

View File

@@ -0,0 +1,17 @@
import { Column, CreateDateColumn, Generated, Table, Timestamp } from '@immich/sql-tools';
import { PrimaryGeneratedUuidV7Column } from 'src/decorators';
/**
 * Audit log of deleted asset edits. Rows are written by the statement-level
 * `asset_edit_audit` DELETE trigger on `asset_edit`; `deletedAt` is indexed so
 * deletions can be queried by time (e.g. for sync).
 */
@Table('asset_edit_audit')
export class AssetEditAuditTable {
  // Audit row id (UUIDv7, time-ordered).
  @PrimaryGeneratedUuidV7Column()
  id!: Generated<string>;

  // Id of the deleted asset_edit row; plain uuid, not a FK — the source row no longer exists.
  @Column({ type: 'uuid' })
  editId!: string;

  // Asset the deleted edit belonged to; indexed for per-asset lookups.
  @Column({ type: 'uuid', index: true })
  assetId!: string;

  // Deletion time; clock_timestamp() records actual wall time rather than transaction start.
  @CreateDateColumn({ default: () => 'clock_timestamp()', index: true })
  deletedAt!: Generated<Timestamp>;
}

View File

@@ -6,13 +6,17 @@ import {
Generated,
PrimaryGeneratedColumn,
Table,
Timestamp,
Unique,
UpdateDateColumn,
} from '@immich/sql-tools';
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { AssetEditAction, AssetEditParameters } from 'src/dtos/editing.dto';
import { asset_edit_delete, asset_edit_insert } from 'src/schema/functions';
import { asset_edit_audit, asset_edit_delete, asset_edit_insert } from 'src/schema/functions';
import { AssetTable } from 'src/schema/tables/asset.table';
@Table('asset_edit')
@UpdatedAtTrigger('asset_edit_updatedAt')
@AfterInsertTrigger({ scope: 'statement', function: asset_edit_insert, referencingNewTableAs: 'inserted_edit' })
@AfterDeleteTrigger({
scope: 'statement',
@@ -20,6 +24,12 @@ import { AssetTable } from 'src/schema/tables/asset.table';
referencingOldTableAs: 'deleted_edit',
when: 'pg_trigger_depth() = 0',
})
@AfterDeleteTrigger({
scope: 'statement',
function: asset_edit_audit,
referencingOldTableAs: 'old',
when: 'pg_trigger_depth() = 0',
})
@Unique({ columns: ['assetId', 'sequence'] })
export class AssetEditTable {
@PrimaryGeneratedColumn()
@@ -36,4 +46,10 @@ export class AssetEditTable {
@Column({ type: 'integer' })
sequence!: number;
@UpdateDateColumn()
updatedAt!: Generated<Timestamp>;
@UpdateIdColumn({ index: true })
updateId!: Generated<string>;
}

View File

@@ -104,7 +104,7 @@ export class AssetTable {
@UpdateDateColumn()
updatedAt!: Generated<Timestamp>;
@CreateDateColumn({ index: true })
@CreateDateColumn()
createdAt!: Generated<Timestamp>;
@Column({ index: true })

View File

@@ -1,27 +0,0 @@
import { Column, CreateDateColumn, ForeignKeyColumn, Generated, Table, Timestamp, Unique } from '@immich/sql-tools';
import { PrimaryGeneratedUuidV7Column } from 'src/decorators';
import { IntegrityReportType } from 'src/enum';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetTable } from 'src/schema/tables/asset.table';
/**
 * One row per detected integrity problem (untracked file, missing file, or
 * checksum failure). Unique on (type, path) so re-scans do not duplicate
 * reports; the optional asset/asset_file links cascade on delete so a report
 * disappears together with its subject row.
 */
@Table('integrity_report')
@Unique({ columns: ['type', 'path'] })
export class IntegrityReportTable {
  // Report id (UUIDv7, time-ordered).
  @PrimaryGeneratedUuidV7Column()
  id!: Generated<string>;

  // Kind of problem — see IntegrityReportType.
  @Column()
  type!: IntegrityReportType;

  // Filesystem path the report is about.
  @Column()
  path!: string;

  @CreateDateColumn()
  createdAt!: Generated<Timestamp>;

  // Set for reports about an asset's files (original / encoded video); cascades with the asset.
  @ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: true })
  assetId!: string | null;

  // Set for reports about a derived asset_file (e.g. thumbnails); cascades with the file row.
  @ForeignKeyColumn(() => AssetFileTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: true })
  fileAssetId!: string | null;
}

View File

@@ -516,7 +516,7 @@ export class AssetService extends BaseService {
dateTimeOriginal?: string;
latitude?: number;
longitude?: number;
rating?: number;
rating?: number | null;
}) {
const { id, description, dateTimeOriginal, latitude, longitude, rating } = dto;
const writes = _.omitBy(
@@ -546,6 +546,7 @@ export class AssetService extends BaseService {
async getAssetEdits(auth: AuthDto, id: string): Promise<AssetEditsResponseDto> {
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] });
const edits = await this.assetEditRepository.getAll(id);
return {
assetId: id,
edits,

View File

@@ -23,7 +23,6 @@ import { DownloadRepository } from 'src/repositories/download.repository';
import { DuplicateRepository } from 'src/repositories/duplicate.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { IntegrityRepository } from 'src/repositories/integrity.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LibraryRepository } from 'src/repositories/library.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
@@ -82,7 +81,6 @@ export const BASE_SERVICE_DEPENDENCIES = [
DuplicateRepository,
EmailRepository,
EventRepository,
IntegrityRepository,
JobRepository,
LibraryRepository,
MachineLearningRepository,
@@ -142,7 +140,6 @@ export class BaseService {
protected duplicateRepository: DuplicateRepository,
protected emailRepository: EmailRepository,
protected eventRepository: EventRepository,
protected integrityRepository: IntegrityRepository,
protected jobRepository: JobRepository,
protected libraryRepository: LibraryRepository,
protected machineLearningRepository: MachineLearningRepository,

View File

@@ -12,7 +12,6 @@ import { DatabaseBackupService } from 'src/services/database-backup.service';
import { DatabaseService } from 'src/services/database.service';
import { DownloadService } from 'src/services/download.service';
import { DuplicateService } from 'src/services/duplicate.service';
import { IntegrityService } from 'src/services/integrity.service';
import { JobService } from 'src/services/job.service';
import { LibraryService } from 'src/services/library.service';
import { MaintenanceService } from 'src/services/maintenance.service';
@@ -63,7 +62,6 @@ export const services = [
DatabaseService,
DownloadService,
DuplicateService,
IntegrityService,
JobService,
LibraryService,
MaintenanceService,

View File

@@ -1,29 +0,0 @@
import { IntegrityService } from 'src/services/integrity.service';
import { newTestService, ServiceMocks } from 'test/utils';
// Unit tests for IntegrityService using the shared service/mocks test harness.
describe(IntegrityService.name, () => {
  let sut: IntegrityService;
  let mocks: ServiceMocks;

  beforeEach(() => {
    ({ sut, mocks } = newTestService(IntegrityService));
  });

  // Smoke test: the service can be constructed with all mocked dependencies.
  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('handleDeleteAllIntegrityReports', () => {
    beforeEach(() => {
      // Return an empty (sync) generator so the for-await loop in the handler completes immediately.
      mocks.integrityReport.streamIntegrityReportsByProperty.mockReturnValue((function* () {})() as never);
    });

    // With no type filter, the handler must stream reports grouped by each
    // linkage property: unlinked (undefined), assetId-linked, and fileAssetId-linked.
    it('should query all property types when no type specified', async () => {
      await sut.handleDeleteAllIntegrityReports({});
      expect(mocks.integrityReport.streamIntegrityReportsByProperty).toHaveBeenCalledWith(undefined, undefined);
      expect(mocks.integrityReport.streamIntegrityReportsByProperty).toHaveBeenCalledWith('assetId', undefined);
      expect(mocks.integrityReport.streamIntegrityReportsByProperty).toHaveBeenCalledWith('fileAssetId', undefined);
    });
  });
});

View File

@@ -1,717 +0,0 @@
import { Injectable } from '@nestjs/common';
import { createHash } from 'node:crypto';
import { basename } from 'node:path';
import { Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { JOBS_LIBRARY_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import {
IntegrityGetReportDto,
IntegrityReportResponseDto,
IntegrityReportSummaryResponseDto,
} from 'src/dtos/integrity.dto';
import {
AssetStatus,
CacheControl,
DatabaseLock,
ImmichWorker,
IntegrityReportType,
JobName,
JobStatus,
QueueName,
StorageFolder,
SystemMetadataKey,
} from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import {
IIntegrityDeleteReportsJob,
IIntegrityDeleteReportTypeJob,
IIntegrityJob,
IIntegrityMissingFilesJob,
IIntegrityPathWithChecksumJob,
IIntegrityPathWithReportJob,
IIntegrityUntrackedFilesJob,
} from 'src/types';
import { ImmichFileResponse } from 'src/utils/file';
import { handlePromiseError } from 'src/utils/misc';
/**
* Untracked Files:
* Files are detected in /data/encoded-video, /data/library, /data/upload
* Checked against the asset table
* Files are detected in /data/thumbs
* Checked against the asset_file table
*
* * Can perform download or delete of files
*
* Missing Files:
* Paths are queried from asset(originalPath, encodedVideoPath), asset_file(path)
* Check whether files exist on disk
*
* * Reports must include origin (asset or asset_file) & ID for further action
* * Can perform trash (asset) or delete (asset_file)
*
* Checksum Mismatch:
* Paths & checksums are queried from asset(originalPath, checksum)
* Check whether files match checksum, missing files ignored
*
* * Reports must include origin (as above) for further action
* * Can perform download or trash (asset)
*/
@Injectable()
export class IntegrityService extends BaseService {
private integrityLock = false;
/**
 * Microservices-worker startup hook: try to take the IntegrityCheck database
 * lock, and — only if this instance wins it — register the three integrity
 * cron jobs (untracked / missing / checksum) from the current config. Losing
 * instances stay passive so the crons run on exactly one worker.
 */
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] })
async onConfigInit({
  newConfig: {
    integrityChecks: { untrackedFiles, missingFiles, checksumFiles },
  },
}: ArgOf<'ConfigInit'>) {
  this.integrityLock = await this.databaseRepository.tryLock(DatabaseLock.IntegrityCheck);
  if (this.integrityLock) {
    // Each cron only enqueues the corresponding queue-all job; errors from the
    // enqueue promise are routed to the logger instead of crashing the tick.
    this.cronRepository.create({
      name: 'integrityUntrackedFiles',
      expression: untrackedFiles.cronExpression,
      onTick: () =>
        handlePromiseError(
          this.jobRepository.queue({ name: JobName.IntegrityUntrackedFilesQueueAll, data: {} }),
          this.logger,
        ),
      start: untrackedFiles.enabled,
    });
    this.cronRepository.create({
      name: 'integrityMissingFiles',
      expression: missingFiles.cronExpression,
      onTick: () =>
        handlePromiseError(
          this.jobRepository.queue({ name: JobName.IntegrityMissingFilesQueueAll, data: {} }),
          this.logger,
        ),
      start: missingFiles.enabled,
    });
    this.cronRepository.create({
      name: 'integrityChecksumFiles',
      expression: checksumFiles.cronExpression,
      onTick: () =>
        handlePromiseError(this.jobRepository.queue({ name: JobName.IntegrityChecksumFiles, data: {} }), this.logger),
      start: checksumFiles.enabled,
    });
  }
}
/**
 * Config-change hook: if this instance holds the integrity lock (i.e. it
 * registered the crons in onConfigInit), push the new cron expressions and
 * enabled flags into the three scheduled jobs. No-op on other instances.
 */
@OnEvent({ name: 'ConfigUpdate', server: true })
onConfigUpdate({
  newConfig: {
    integrityChecks: { untrackedFiles, missingFiles, checksumFiles },
  },
}: ArgOf<'ConfigUpdate'>) {
  if (!this.integrityLock) {
    return;
  }
  this.cronRepository.update({
    name: 'integrityUntrackedFiles',
    expression: untrackedFiles.cronExpression,
    start: untrackedFiles.enabled,
  });
  this.cronRepository.update({
    name: 'integrityMissingFiles',
    expression: missingFiles.cronExpression,
    start: missingFiles.enabled,
  });
  this.cronRepository.update({
    name: 'integrityChecksumFiles',
    expression: checksumFiles.cronExpression,
    start: checksumFiles.enabled,
  });
}
/** Returns per-type counts/summary of integrity reports (straight pass-through to the repository). */
getIntegrityReportSummary(): Promise<IntegrityReportSummaryResponseDto> {
  return this.integrityRepository.getIntegrityReportSummary();
}
/** Returns a cursor-paginated page of reports, optionally filtered by type; page size defaults to 100. */
getIntegrityReport(dto: IntegrityGetReportDto): Promise<IntegrityReportResponseDto> {
  return this.integrityRepository.getIntegrityReport({ cursor: dto.cursor, limit: dto.limit || 100 }, dto.type);
}
/**
 * Streams all reports of the given type as CSV (header + one row per report).
 * Only the path column is quoted/escaped; the other fields are ids and an enum
 * value, which are assumed not to need escaping.
 */
getIntegrityReportCsv(type: IntegrityReportType): Readable {
  const items = this.integrityRepository.streamIntegrityReports(type);

  // very rudimentary csv serialiser
  async function* generator() {
    yield 'id,type,assetId,fileAssetId,path\n';
    for await (const item of items) {
      // no expectation of particularly bad filenames
      // but they could potentially have a newline or quote character
      yield `${item.id},${item.type},${item.assetId},${item.fileAssetId},"${item.path.replaceAll('"', '""')}"\n`;
    }
  }

  return Readable.from(generator());
}
/**
 * Resolves a report id to a downloadable file response for the reported path
 * (e.g. to let an admin retrieve an untracked file before deleting it).
 * Served as a generic binary attachment with private, no-cache headers.
 */
async getIntegrityReportFile(id: string): Promise<ImmichFileResponse> {
  const { path } = await this.integrityRepository.getById(id);
  return new ImmichFileResponse({
    path,
    fileName: basename(path),
    contentType: 'application/octet-stream',
    cacheControl: CacheControl.PrivateWithoutCache,
  });
}
/**
 * Resolves a single report by acting on its subject:
 *  - asset-linked: trash the asset (soft delete + AssetTrashAll event), then delete the report;
 *  - file-linked: delete the asset_file row — the report row is removed via
 *    the fileAssetId FK's ON DELETE CASCADE (see integrity_report schema);
 *  - unlinked (untracked file): unlink the file from disk, then delete the report.
 */
async deleteIntegrityReport(userId: string, id: string): Promise<void> {
  const { path, assetId, fileAssetId } = await this.integrityRepository.getById(id);
  if (assetId) {
    await this.assetRepository.updateAll([assetId], {
      deletedAt: new Date(),
      status: AssetStatus.Trashed,
    });
    await this.eventRepository.emit('AssetTrashAll', {
      assetIds: [assetId],
      userId,
    });
    await this.integrityRepository.deleteById(id);
  } else if (fileAssetId) {
    await this.assetRepository.deleteFiles([{ id: fileAssetId }]);
  } else {
    await this.storageRepository.unlink(path);
    await this.integrityRepository.deleteById(id);
  }
}
/**
 * Streams all existing untracked-file reports and queues them in batches of
 * JOBS_LIBRARY_PAGINATION_SIZE for re-verification, so reports whose files
 * have since disappeared (or become tracked) can be cleaned up.
 */
private async queueRefreshAllUntrackedFiles() {
  this.logger.log(`Checking for out of date untracked file reports...`);
  const reports = this.integrityRepository.streamIntegrityReportsWithAssetChecksum(IntegrityReportType.UntrackedFile);
  let total = 0;
  for await (const batchReports of chunk(reports, JOBS_LIBRARY_PAGINATION_SIZE)) {
    await this.jobRepository.queue({
      name: JobName.IntegrityUntrackedFilesRefresh,
      data: {
        items: batchReports,
      },
    });
    total += batchReports.length;
    this.logger.log(`Queued report check of ${batchReports.length} report(s) (${total} so far)`);
  }
}
/**
 * Queue-all job for the untracked-files check. Always refreshes existing
 * reports first; unless refreshOnly is set, it then walks the storage folders
 * and queues batches of paths to be checked against the database:
 *  - encoded-video / library / upload folders are checked against `asset`,
 *  - the thumbnails folder is checked against `asset_file`.
 */
@OnJob({ name: JobName.IntegrityUntrackedFilesQueueAll, queue: QueueName.IntegrityCheck })
async handleUntrackedFilesQueueAll({ refreshOnly }: IIntegrityJob = {}): Promise<JobStatus> {
  await this.queueRefreshAllUntrackedFiles();
  if (refreshOnly) {
    this.logger.log('Refresh complete.');
    return JobStatus.Success;
  }

  this.logger.log(`Scanning for untracked files...`);
  const assetPaths = this.storageRepository.walk({
    pathsToCrawl: [StorageFolder.EncodedVideo, StorageFolder.Library, StorageFolder.Upload].map((folder) =>
      StorageCore.getBaseFolder(folder),
    ),
    includeHidden: false,
    take: JOBS_LIBRARY_PAGINATION_SIZE,
  });
  const assetFilePaths = this.storageRepository.walk({
    pathsToCrawl: [StorageCore.getBaseFolder(StorageFolder.Thumbnails)],
    includeHidden: false,
    take: JOBS_LIBRARY_PAGINATION_SIZE,
  });

  // Tag each walked batch with which table its paths should be checked against.
  async function* paths() {
    for await (const batch of assetPaths) {
      yield ['asset', batch] as const;
    }
    for await (const batch of assetFilePaths) {
      yield ['asset_file', batch] as const;
    }
  }

  let total = 0;
  for await (const [batchType, batchPaths] of paths()) {
    await this.jobRepository.queue({
      name: JobName.IntegrityUntrackedFiles,
      data: {
        type: batchType,
        paths: batchPaths,
      },
    });
    const count = batchPaths.length;
    total += count;
    this.logger.log(`Queued untracked check of ${count} file(s) (${total} so far)`);
  }
  return JobStatus.Success;
}
/**
 * Worker job for one batch of walked paths: removes every path that has a
 * matching database row (asset originalPath/encodedVideoPath, or asset_file
 * path, depending on `type`) and files an UntrackedFile report for the rest.
 * Duplicate reports are constrained by the (type, path) unique key.
 */
@OnJob({ name: JobName.IntegrityUntrackedFiles, queue: QueueName.IntegrityCheck })
async handleUntrackedFiles({ type, paths }: IIntegrityUntrackedFilesJob): Promise<JobStatus> {
  this.logger.log(`Processing batch of ${paths.length} files to check if they are untracked.`);
  const untrackedFiles = new Set<string>(paths);
  if (type === 'asset') {
    const assets = await this.integrityRepository.getAssetPathsByPaths(paths);
    for (const { originalPath, encodedVideoPath } of assets) {
      untrackedFiles.delete(originalPath);
      if (encodedVideoPath) {
        untrackedFiles.delete(encodedVideoPath);
      }
    }
  } else {
    const assets = await this.integrityRepository.getAssetFilePathsByPaths(paths);
    for (const { path } of assets) {
      untrackedFiles.delete(path);
    }
  }

  if (untrackedFiles.size > 0) {
    await this.integrityRepository.create(
      [...untrackedFiles].map((path) => ({
        type: IntegrityReportType.UntrackedFile,
        path,
      })),
    );
  }

  this.logger.log(`Processed ${paths.length} and found ${untrackedFiles.size} untracked file(s).`);
  return JobStatus.Success;
}
/**
 * Refresh job for untracked-file reports: stats each reported path in
 * parallel and deletes reports whose file no longer exists on disk (stat
 * rejection => the report is out of date). Reports for files that still exist
 * are kept; tracking status is re-evaluated by the queue-all scan, not here.
 */
@OnJob({ name: JobName.IntegrityUntrackedFilesRefresh, queue: QueueName.IntegrityCheck })
async handleUntrackedRefresh({ items }: IIntegrityPathWithReportJob): Promise<JobStatus> {
  this.logger.log(`Processing batch of ${items.length} reports to check if they are out of date.`);
  const results = await Promise.all(
    items.map(({ reportId, path }) =>
      this.storageRepository
        .stat(path)
        .then(() => void 0)
        .catch(() => reportId),
    ),
  );

  const reportIds = results.filter(Boolean) as string[];
  if (reportIds.length > 0) {
    await this.integrityRepository.deleteByIds(reportIds);
  }

  this.logger.log(`Processed ${items.length} paths and found ${reportIds.length} report(s) out of date.`);
  return JobStatus.Success;
}
/**
 * Streams all existing missing-file reports and queues them in batches for
 * re-verification, deleting reports whose files have reappeared.
 */
private async queueRefreshAllMissingFiles() {
  this.logger.log(`Checking for out of date missing file reports...`);
  const reports = this.integrityRepository.streamIntegrityReportsWithAssetChecksum(IntegrityReportType.MissingFile);
  let total = 0;
  for await (const batchReports of chunk(reports, JOBS_LIBRARY_PAGINATION_SIZE)) {
    await this.jobRepository.queue({
      name: JobName.IntegrityMissingFilesRefresh,
      data: {
        items: batchReports,
      },
    });
    total += batchReports.length;
    this.logger.log(`Queued report check of ${batchReports.length} report(s) (${total} so far)`);
  }
  this.logger.log('Refresh complete.');
}
/**
 * Queue-all job for the missing-files check. With refreshOnly it only
 * re-verifies existing reports; otherwise it streams every database-known
 * path (asset originalPath/encodedVideoPath and asset_file path) and queues
 * batches to be stat'ed on disk.
 */
@OnJob({ name: JobName.IntegrityMissingFilesQueueAll, queue: QueueName.IntegrityCheck })
async handleMissingFilesQueueAll({ refreshOnly }: IIntegrityJob = {}): Promise<JobStatus> {
  if (refreshOnly) {
    await this.queueRefreshAllMissingFiles();
    return JobStatus.Success;
  }

  this.logger.log(`Scanning for missing files...`);
  const assetPaths = this.integrityRepository.streamAssetPaths();
  let total = 0;
  for await (const batchPaths of chunk(assetPaths, JOBS_LIBRARY_PAGINATION_SIZE)) {
    await this.jobRepository.queue({
      name: JobName.IntegrityMissingFiles,
      data: {
        items: batchPaths,
      },
    });
    total += batchPaths.length;
    this.logger.log(`Queued missing check of ${batchPaths.length} file(s) (${total} so far)`);
  }
  return JobStatus.Success;
}
/**
 * Worker job for one batch of database paths: stats each path in parallel,
 * deletes now-stale reports for files that exist again, and files a
 * MissingFile report (linked back to the asset or asset_file) for each path
 * that is absent from disk.
 */
@OnJob({ name: JobName.IntegrityMissingFiles, queue: QueueName.IntegrityCheck })
async handleMissingFiles({ items }: IIntegrityMissingFilesJob): Promise<JobStatus> {
  this.logger.log(`Processing batch of ${items.length} files to check if they are missing.`);
  const results = await Promise.all(
    items.map((item) =>
      this.storageRepository
        .stat(item.path)
        .then(() => ({ ...item, exists: true }))
        .catch(() => ({ ...item, exists: false })),
    ),
  );

  // A pre-existing report whose file now exists is out of date — remove it.
  const outdatedReports = results
    .filter(({ exists, reportId }) => exists && reportId)
    .map(({ reportId }) => reportId!);
  if (outdatedReports.length > 0) {
    await this.integrityRepository.deleteByIds(outdatedReports);
  }

  const missingFiles = results.filter(({ exists }) => !exists);
  if (missingFiles.length > 0) {
    await this.integrityRepository.create(
      missingFiles.map(({ path, assetId, fileAssetId }) => ({
        type: IntegrityReportType.MissingFile,
        path,
        assetId,
        fileAssetId,
      })),
    );
  }

  this.logger.log(`Processed ${items.length} and found ${missingFiles.length} missing file(s).`);
  return JobStatus.Success;
}
/**
 * Refresh job for missing-file reports: stats each reported path and deletes
 * reports whose file exists again (stat success => report out of date).
 * Note the inverse of the untracked refresh: here a successful stat
 * invalidates the report.
 */
@OnJob({ name: JobName.IntegrityMissingFilesRefresh, queue: QueueName.IntegrityCheck })
async handleMissingRefresh({ items: paths }: IIntegrityPathWithReportJob): Promise<JobStatus> {
  this.logger.log(`Processing batch of ${paths.length} reports to check if they are out of date.`);
  const results = await Promise.all(
    paths.map(({ reportId, path }) =>
      this.storageRepository
        .stat(path)
        .then(() => reportId)
        .catch(() => void 0),
    ),
  );

  const reportIds = results.filter(Boolean) as string[];
  if (reportIds.length > 0) {
    await this.integrityRepository.deleteByIds(reportIds);
  }

  this.logger.log(`Processed ${paths.length} paths and found ${reportIds.length} report(s) out of date.`);
  return JobStatus.Success;
}
/**
 * Streams all checksum-failure reports and queues them in batches for
 * re-hashing; checksums are serialized to hex strings so they survive the
 * job-queue round trip.
 */
private async queueRefreshAllChecksumFiles() {
  this.logger.log(`Checking for out of date checksum file reports...`);
  const reports = this.integrityRepository.streamIntegrityReportsWithAssetChecksum(IntegrityReportType.ChecksumFail);
  let total = 0;
  for await (const batchReports of chunk(reports, JOBS_LIBRARY_PAGINATION_SIZE)) {
    await this.jobRepository.queue({
      name: JobName.IntegrityChecksumFilesRefresh,
      data: {
        items: batchReports.map(({ path, reportId, checksum }) => ({
          path,
          reportId,
          checksum: checksum?.toString('hex'),
        })),
      },
    });
    total += batchReports.length;
    this.logger.log(`Queued report check of ${batchReports.length} report(s) (${total} so far)`);
  }
  this.logger.log('Refresh complete.');
}
/**
 * Hashes one asset's original file (SHA-1, streamed to bound memory) and
 * compares it against the stored checksum.
 *  - match: any pre-existing ChecksumFail report for it is deleted;
 *  - missing file (ENOENT): the report (if any) is deleted and the case is
 *    left to the missing-files job;
 *  - mismatch or other read error: a ChecksumFail report is created
 *    (duplicates are handled by the (type, path) unique key — TODO confirm
 *    the repository upserts rather than erroring on conflict).
 */
private async checkAssetChecksum(
  originalPath: string,
  checksum: Buffer<ArrayBufferLike>,
  assetId: string,
  reportId: string | null,
) {
  const hash = createHash('sha1');
  try {
    // Stream the file through the hash instead of reading it into memory.
    await pipeline([
      this.storageRepository.createPlainReadStream(originalPath),
      new Writable({
        write(chunk, _encoding, callback) {
          hash.update(chunk);
          callback();
        },
      }),
    ]);

    if (checksum.equals(hash.digest())) {
      if (reportId) {
        await this.integrityRepository.deleteById(reportId);
      }
    } else {
      // Funnel the mismatch into the same report-creating catch path below.
      throw new Error('File failed checksum');
    }
  } catch (error) {
    if ((error as { code?: string }).code === 'ENOENT') {
      if (reportId) {
        await this.integrityRepository.deleteById(reportId);
      }
      // missing file; handled by the missing files job
      return;
    }

    this.logger.warn('Failed to process a file: ' + error);
    await this.integrityRepository.create({
      path: originalPath,
      type: IntegrityReportType.ChecksumFail,
      assetId,
    });
  }
}
/**
 * Checksum-verification job. Because hashing every asset is expensive, each
 * run is budgeted by a time limit and a percentage-of-library limit from
 * config, and progress is checkpointed (by asset createdAt) in system
 * metadata so the next run resumes where this one stopped.
 *
 * Scan order: first the range [checkpoint, end], then — by swapping the
 * markers — [beginning, checkpoint], so repeated runs wrap around the whole
 * library. If the run completes both ranges, the checkpoint is cleared.
 * With refreshOnly set, it only re-verifies existing ChecksumFail reports.
 */
@OnJob({ name: JobName.IntegrityChecksumFiles, queue: QueueName.IntegrityCheck })
async handleChecksumFiles({ refreshOnly }: IIntegrityJob = {}): Promise<JobStatus> {
  if (refreshOnly) {
    await this.queueRefreshAllChecksumFiles();
    return JobStatus.Success;
  }

  const {
    integrityChecks: {
      checksumFiles: { timeLimit, percentageLimit },
    },
  } = await this.getConfig({
    withCache: true,
  });

  this.logger.log(
    `Checking file checksums... (will run for up to ${(timeLimit / (60 * 60 * 1000)).toFixed(2)} hours or until ${(percentageLimit * 100).toFixed(2)}% of assets are processed)`,
  );

  let processed = 0;
  const startedAt = Date.now();
  const { count } = await this.integrityRepository.getAssetCount();
  const checkpoint = await this.systemMetadataRepository.get(SystemMetadataKey.IntegrityChecksumCheckpoint);
  let startMarker: Date | undefined = checkpoint?.date ? new Date(checkpoint.date) : undefined;
  let endMarker: Date | undefined;

  const printStats = () => {
    const averageTime = ((Date.now() - startedAt) / processed).toFixed(2);
    const completionProgress = ((processed / count) * 100).toFixed(2);
    this.logger.log(
      `Processed ${processed} files so far... (avg. ${averageTime} ms/asset, ${completionProgress}% of all assets)`,
    );
  };

  // Set only when a stop criterion triggers; becomes the next run's checkpoint.
  let lastCreatedAt: Date | undefined;

  // Labeled loop so the inner per-asset loop can abort both ranges at once.
  finishEarly: do {
    this.logger.log(
      `Processing assets in range [${startMarker?.toISOString() ?? 'beginning'}, ${endMarker?.toISOString() ?? 'end'}]`,
    );
    const assets = this.integrityRepository.streamAssetChecksums(startMarker, endMarker);
    // Swap markers: the second (and final) pass covers [beginning, old checkpoint].
    endMarker = startMarker;
    startMarker = undefined;
    for await (const { originalPath, checksum, createdAt, assetId, reportId } of assets) {
      await this.checkAssetChecksum(originalPath, checksum, assetId, reportId);
      processed++;
      if (processed % 100 === 0) {
        printStats();
      }
      if (Date.now() > startedAt + timeLimit || processed > count * percentageLimit) {
        this.logger.log('Reached stop criteria.');
        lastCreatedAt = createdAt;
        break finishEarly;
      }
    }
  } while (endMarker);

  await this.systemMetadataRepository.set(SystemMetadataKey.IntegrityChecksumCheckpoint, {
    date: lastCreatedAt?.toISOString(),
  });
  printStats();
  if (lastCreatedAt) {
    this.logger.log(`Finished checksum job, will continue from ${lastCreatedAt.toISOString()}.`);
  } else {
    this.logger.log(`Finished checksum job, covered all assets.`);
  }

  return JobStatus.Success;
}
/**
 * Refresh job for checksum-failure reports: re-hashes each reported file and
 * deletes reports that are out of date. A report is considered out of date
 * when its stored checksum is absent, the file is gone (ENOENT — the
 * missing-files job owns that case), or the file now matches its checksum.
 * NOTE(review): a non-ENOENT read error falls through and compares a digest
 * of the partial data — presumably it then mismatches and the report is kept;
 * confirm this is the intended behavior.
 */
@OnJob({ name: JobName.IntegrityChecksumFilesRefresh, queue: QueueName.IntegrityCheck })
async handleChecksumRefresh({ items: paths }: IIntegrityPathWithChecksumJob): Promise<JobStatus> {
  this.logger.log(`Processing batch of ${paths.length} reports to check if they are out of date.`);
  const results = await Promise.all(
    paths.map(async ({ reportId, path, checksum }) => {
      if (!checksum) {
        return reportId;
      }
      const hash = createHash('sha1');
      try {
        await pipeline([
          this.storageRepository.createPlainReadStream(path),
          new Writable({
            write(chunk, _encoding, callback) {
              hash.update(chunk);
              callback();
            },
          }),
        ]);
      } catch (error) {
        if ((error as { code?: string }).code === 'ENOENT') {
          return reportId;
        }
      }
      if (Buffer.from(checksum, 'hex').equals(hash.digest())) {
        return reportId;
      }
    }),
  );

  const reportIds = results.filter(Boolean) as string[];
  if (reportIds.length > 0) {
    await this.integrityRepository.deleteByIds(reportIds);
  }

  this.logger.log(`Processed ${paths.length} paths and found ${reportIds.length} report(s) out of date.`);
  return JobStatus.Success;
}
/**
 * Bulk "resolve all reports" job, optionally limited to one report type.
 * Reports are streamed grouped by which linkage property they carry
 * (undefined = path-only, 'assetId', 'fileAssetId') — matching how each kind
 * is resolved — and queued in batches for IntegrityDeleteReports:
 *  - ChecksumFail reports are always asset-linked,
 *  - MissingFile reports may link either an asset or an asset_file,
 *  - UntrackedFile reports are unlinked,
 *  - no type filter covers all three groups.
 */
@OnJob({ name: JobName.IntegrityDeleteReportType, queue: QueueName.IntegrityCheck })
async handleDeleteAllIntegrityReports({ type }: IIntegrityDeleteReportTypeJob): Promise<JobStatus> {
  this.logger.log(`Deleting all entries for ${type ?? 'all types of'} integrity report`);

  let properties;
  switch (type) {
    case IntegrityReportType.ChecksumFail: {
      properties = ['assetId'] as const;
      break;
    }
    case IntegrityReportType.MissingFile: {
      properties = ['assetId', 'fileAssetId'] as const;
      break;
    }
    case IntegrityReportType.UntrackedFile: {
      properties = [void 0] as const;
      break;
    }
    default: {
      properties = [void 0, 'assetId', 'fileAssetId'] as const;
      break;
    }
  }

  for (const property of properties) {
    const reports = this.integrityRepository.streamIntegrityReportsByProperty(property, type);
    for await (const batch of chunk(reports, JOBS_LIBRARY_PAGINATION_SIZE)) {
      await this.jobRepository.queue({
        name: JobName.IntegrityDeleteReports,
        data: {
          reports: batch,
        },
      });
      this.logger.log(`Queued ${batch.length} reports to delete.`);
    }
  }

  return JobStatus.Success;
}
/**
 * Worker job that resolves a batch of reports, partitioned by linkage:
 *  - asset-linked: trash the assets, emit AssetTrashAll, delete the reports;
 *  - file-linked: delete the asset_file rows (the reports go away via the
 *    fileAssetId FK's ON DELETE CASCADE — see integrity_report schema);
 *  - unlinked: best-effort unlink of the file from disk, then delete the reports.
 */
@OnJob({ name: JobName.IntegrityDeleteReports, queue: QueueName.IntegrityCheck })
async handleDeleteIntegrityReports({ reports }: IIntegrityDeleteReportsJob): Promise<JobStatus> {
  const byAsset = reports.filter((report) => report.assetId);
  const byFileAsset = reports.filter((report) => report.fileAssetId);
  const byPath = reports.filter((report) => !report.assetId && !report.fileAssetId);

  if (byAsset.length > 0) {
    const ids = byAsset.map(({ assetId }) => assetId!);
    await this.assetRepository.updateAll(ids, {
      deletedAt: new Date(),
      status: AssetStatus.Trashed,
    });
    await this.eventRepository.emit('AssetTrashAll', {
      assetIds: ids,
      userId: '', // we don't notify any users currently
    });
    await this.integrityRepository.deleteByIds(byAsset.map(({ id }) => id));
  }

  if (byFileAsset.length > 0) {
    await this.assetRepository.deleteFiles(byFileAsset.map(({ fileAssetId }) => ({ id: fileAssetId! })));
  }

  if (byPath.length > 0) {
    // Ignore unlink failures (e.g. already-deleted files) — the reports are cleared regardless.
    await Promise.all(byPath.map(({ path }) => this.storageRepository.unlink(path).catch(() => void 0)));
    await this.integrityRepository.deleteByIds(byPath.map(({ id }) => id));
  }

  this.logger.log(`Deleted ${reports.length} reports.`);
  return JobStatus.Success;
}
}
/**
 * Groups values from an async iterator into arrays of at most `n` items.
 * Full batches of exactly `n` are yielded as they fill up; any remainder is
 * yielded last. Yields nothing for an empty source.
 */
async function* chunk<T>(generator: AsyncIterableIterator<T>, n: number) {
  let batch: T[] = [];
  while (true) {
    const { value, done } = await generator.next();
    if (done) {
      break;
    }
    batch.push(value);
    if (batch.length === n) {
      yield batch;
      batch = [];
    }
  }
  // Flush the final, possibly short, batch.
  if (batch.length > 0) {
    yield batch;
  }
}

View File

@@ -2,7 +2,7 @@ import { BadRequestException, Injectable } from '@nestjs/common';
import { OnEvent } from 'src/decorators';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { JobCreateDto } from 'src/dtos/job.dto';
import { AssetType, AssetVisibility, IntegrityReportType, JobName, JobStatus, ManualJobName } from 'src/enum';
import { AssetType, AssetVisibility, JobName, JobStatus, ManualJobName } from 'src/enum';
import { ArgsOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem } from 'src/types';
@@ -34,42 +34,6 @@ const asJobItem = (dto: JobCreateDto): JobItem => {
return { name: JobName.DatabaseBackup };
}
case ManualJobName.IntegrityMissingFiles: {
return { name: JobName.IntegrityMissingFilesQueueAll };
}
case ManualJobName.IntegrityUntrackedFiles: {
return { name: JobName.IntegrityUntrackedFilesQueueAll };
}
case ManualJobName.IntegrityChecksumFiles: {
return { name: JobName.IntegrityChecksumFiles };
}
case ManualJobName.IntegrityMissingFilesRefresh: {
return { name: JobName.IntegrityMissingFilesQueueAll, data: { refreshOnly: true } };
}
case ManualJobName.IntegrityUntrackedFilesRefresh: {
return { name: JobName.IntegrityUntrackedFilesQueueAll, data: { refreshOnly: true } };
}
case ManualJobName.IntegrityChecksumFilesRefresh: {
return { name: JobName.IntegrityChecksumFiles, data: { refreshOnly: true } };
}
case ManualJobName.IntegrityMissingFilesDeleteAll: {
return { name: JobName.IntegrityDeleteReportType, data: { type: IntegrityReportType.MissingFile } };
}
case ManualJobName.IntegrityUntrackedFilesDeleteAll: {
return { name: JobName.IntegrityDeleteReportType, data: { type: IntegrityReportType.UntrackedFile } };
}
case ManualJobName.IntegrityChecksumFilesDeleteAll: {
return { name: JobName.IntegrityDeleteReportType, data: { type: IntegrityReportType.ChecksumFail } };
}
default: {
throw new BadRequestException('Invalid job name');
}
@@ -134,6 +98,7 @@ export class JobService extends BaseService {
case JobName.AssetEditThumbnailGeneration: {
const asset = await this.assetRepository.getById(item.data.id);
const edits = await this.assetEditRepository.getWithSyncInfo(item.data.id);
if (asset) {
this.websocketRepository.clientSend('AssetEditReadyV1', asset.ownerId, {
@@ -158,6 +123,7 @@ export class JobService extends BaseService {
height: asset.height,
isEdited: asset.isEdited,
},
edit: edits,
});
}

View File

@@ -23,10 +23,11 @@ import { MediaService } from 'src/services/media.service';
import { JobCounts, RawImageInfo } from 'src/types';
import { AssetFaceFactory } from 'test/factories/asset-face.factory';
import { AssetFactory } from 'test/factories/asset.factory';
import { PersonFactory } from 'test/factories/person.factory';
import { probeStub } from 'test/fixtures/media.stub';
import { personStub, personThumbnailStub } from 'test/fixtures/person.stub';
import { personThumbnailStub } from 'test/fixtures/person.stub';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { factory } from 'test/small.factory';
import { factory, newUuid } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
const fullsizeBuffer = Buffer.from('embedded image data');
@@ -50,9 +51,10 @@ describe(MediaService.name, () => {
describe('handleQueueGenerateThumbnails', () => {
it('should queue all assets', async () => {
const asset = AssetFactory.create();
const person = PersonFactory.create({ faceAssetId: newUuid() });
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([asset]));
mocks.person.getAll.mockReturnValue(makeStream([personStub.newThumbnail]));
mocks.person.getAll.mockReturnValue(makeStream([person]));
await sut.handleQueueGenerateThumbnails({ force: true });
@@ -68,7 +70,7 @@ describe(MediaService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.PersonGenerateThumbnail,
data: { id: personStub.newThumbnail.id },
data: { id: person.id },
},
]);
});
@@ -106,8 +108,13 @@ describe(MediaService.name, () => {
});
it('should queue all people with missing thumbnail path', async () => {
const [person1, person2] = [
PersonFactory.create({ thumbnailPath: undefined }),
PersonFactory.create({ thumbnailPath: undefined }),
];
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([AssetFactory.create()]));
mocks.person.getAll.mockReturnValue(makeStream([personStub.noThumbnail, personStub.noThumbnail]));
mocks.person.getAll.mockReturnValue(makeStream([person1, person2]));
mocks.person.getRandomFace.mockResolvedValueOnce(AssetFaceFactory.create());
await sut.handleQueueGenerateThumbnails({ force: false });
@@ -120,7 +127,7 @@ describe(MediaService.name, () => {
{
name: JobName.PersonGenerateThumbnail,
data: {
id: personStub.newThumbnail.id,
id: person1.id,
},
},
]);
@@ -276,17 +283,17 @@ describe(MediaService.name, () => {
describe('handleQueueMigration', () => {
it('should remove empty directories and queue jobs', async () => {
const asset = AssetFactory.create();
const person = PersonFactory.create();
mocks.assetJob.streamForMigrationJob.mockReturnValue(makeStream([asset]));
mocks.job.getJobCounts.mockResolvedValue({ active: 1, waiting: 0 } as JobCounts);
mocks.person.getAll.mockReturnValue(makeStream([personStub.withName]));
mocks.person.getAll.mockReturnValue(makeStream([person]));
await expect(sut.handleQueueMigration()).resolves.toBe(JobStatus.Success);
expect(mocks.storage.removeEmptyDirs).toHaveBeenCalledTimes(2);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.AssetFileMigration, data: { id: asset.id } }]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.PersonFileMigration, data: { id: personStub.withName.id } },
]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.PersonFileMigration, data: { id: person.id } }]);
});
});
@@ -1479,8 +1486,9 @@ describe(MediaService.name, () => {
});
it('should skip a person without a face asset id', async () => {
mocks.person.getById.mockResolvedValue(personStub.noThumbnail);
await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
const person = PersonFactory.create({ faceAssetId: null });
mocks.person.getById.mockResolvedValue(person);
await sut.handleGeneratePersonThumbnail({ id: person.id });
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
});
@@ -1490,17 +1498,17 @@ describe(MediaService.name, () => {
});
it('should generate a thumbnail', async () => {
const person = PersonFactory.create();
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailMiddle);
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 1000, height: 1000 } as OutputInfo;
mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(personStub.primaryPerson.id);
expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(person.id);
expect(mocks.storage.mkdirSync).toHaveBeenCalledWith(expect.any(String));
expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailMiddle.originalPath, {
colorspace: Colorspace.P3,
@@ -1531,21 +1539,21 @@ describe(MediaService.name, () => {
},
expect.any(String),
);
expect(mocks.person.update).toHaveBeenCalledWith({ id: 'person-1', thumbnailPath: expect.any(String) });
expect(mocks.person.update).toHaveBeenCalledWith({ id: person.id, thumbnailPath: expect.any(String) });
});
it('should use preview path if video', async () => {
const person = PersonFactory.create();
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.videoThumbnail);
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 1000, height: 1000 } as OutputInfo;
mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(personStub.primaryPerson.id);
expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(person.id);
expect(mocks.storage.mkdirSync).toHaveBeenCalledWith(expect.any(String));
expect(mocks.media.decodeImage).toHaveBeenCalledWith(expect.any(String), {
colorspace: Colorspace.P3,
@@ -1576,19 +1584,19 @@ describe(MediaService.name, () => {
},
expect.any(String),
);
expect(mocks.person.update).toHaveBeenCalledWith({ id: 'person-1', thumbnailPath: expect.any(String) });
expect(mocks.person.update).toHaveBeenCalledWith({ id: person.id, thumbnailPath: expect.any(String) });
});
it('should generate a thumbnail without going negative', async () => {
const person = PersonFactory.create();
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailStart);
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 2160, height: 3840 } as OutputInfo;
mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailStart.originalPath, {
colorspace: Colorspace.P3,
@@ -1622,16 +1630,16 @@ describe(MediaService.name, () => {
});
it('should generate a thumbnail without overflowing', async () => {
const person = PersonFactory.create();
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailEnd);
mocks.person.update.mockResolvedValue(personStub.primaryPerson);
mocks.person.update.mockResolvedValue(person);
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 1000, height: 1000 } as OutputInfo;
mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailEnd.originalPath, {
colorspace: Colorspace.P3,
@@ -1665,16 +1673,16 @@ describe(MediaService.name, () => {
});
it('should handle negative coordinates', async () => {
const person = PersonFactory.create();
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.negativeCoordinate);
mocks.person.update.mockResolvedValue(personStub.primaryPerson);
mocks.person.update.mockResolvedValue(person);
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 4624, height: 3080 } as OutputInfo;
mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.negativeCoordinate.originalPath, {
colorspace: Colorspace.P3,
@@ -1708,16 +1716,16 @@ describe(MediaService.name, () => {
});
it('should handle overflowing coordinate', async () => {
const person = PersonFactory.create();
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.overflowingCoordinate);
mocks.person.update.mockResolvedValue(personStub.primaryPerson);
mocks.person.update.mockResolvedValue(person);
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 4624, height: 3080 } as OutputInfo;
mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.overflowingCoordinate.originalPath, {
colorspace: Colorspace.P3,
@@ -1751,9 +1759,11 @@ describe(MediaService.name, () => {
});
it('should use embedded preview if enabled and raw image', async () => {
const person = PersonFactory.create();
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.rawEmbeddedThumbnail);
mocks.person.update.mockResolvedValue(personStub.primaryPerson);
mocks.person.update.mockResolvedValue(person);
mocks.media.generateThumbnail.mockResolvedValue();
const extracted = Buffer.from('');
const data = Buffer.from('');
@@ -1762,9 +1772,7 @@ describe(MediaService.name, () => {
mocks.media.decodeImage.mockResolvedValue({ data, info });
mocks.media.getImageMetadata.mockResolvedValue({ width: 2160, height: 3840, isTransparent: false });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extracted, {
@@ -1799,21 +1807,23 @@ describe(MediaService.name, () => {
});
it('should not use embedded preview if enabled and not raw image', async () => {
const person = PersonFactory.create();
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailMiddle);
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 2160, height: 3840 } as OutputInfo;
mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.extract).not.toHaveBeenCalled();
expect(mocks.media.generateThumbnail).toHaveBeenCalled();
});
it('should not use embedded preview if enabled and raw image if not exists', async () => {
const person = PersonFactory.create();
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.rawEmbeddedThumbnail);
mocks.media.generateThumbnail.mockResolvedValue();
@@ -1821,9 +1831,7 @@ describe(MediaService.name, () => {
const info = { width: 2160, height: 3840 } as OutputInfo;
mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath, {
@@ -1835,6 +1843,8 @@ describe(MediaService.name, () => {
});
it('should not use embedded preview if enabled and raw image if low resolution', async () => {
const person = PersonFactory.create();
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.rawEmbeddedThumbnail);
mocks.media.generateThumbnail.mockResolvedValue();
@@ -1845,9 +1855,7 @@ describe(MediaService.name, () => {
mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.Jpeg });
mocks.media.getImageMetadata.mockResolvedValue({ width: 1000, height: 1000, isTransparent: false });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
);
await expect(sut.handleGeneratePersonThumbnail({ id: person.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath, {

View File

@@ -16,8 +16,8 @@ import {
import { ImmichTags } from 'src/repositories/metadata.repository';
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
import { AssetFactory } from 'test/factories/asset.factory';
import { PersonFactory } from 'test/factories/person.factory';
import { probeStub } from 'test/fixtures/media.stub';
import { personStub } from 'test/fixtures/person.stub';
import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
@@ -1208,18 +1208,18 @@ describe(MetadataService.name, () => {
it('should apply metadata face tags creating new people', async () => {
const asset = AssetFactory.create();
const person = PersonFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.systemMetadata.get.mockResolvedValue({ metadata: { faces: { import: true } } });
mockReadTags(makeFaceTags({ Name: personStub.withName.name }));
mockReadTags(makeFaceTags({ Name: person.name }));
mocks.person.getDistinctNames.mockResolvedValue([]);
mocks.person.createAll.mockResolvedValue([personStub.withName.id]);
mocks.person.update.mockResolvedValue(personStub.withName);
mocks.person.createAll.mockResolvedValue([person.id]);
mocks.person.update.mockResolvedValue(person);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.person.getDistinctNames).toHaveBeenCalledWith(asset.ownerId, { withHidden: true });
expect(mocks.person.createAll).toHaveBeenCalledWith([
expect.objectContaining({ name: personStub.withName.name }),
]);
expect(mocks.person.createAll).toHaveBeenCalledWith([expect.objectContaining({ name: person.name })]);
expect(mocks.person.refreshFaces).toHaveBeenCalledWith(
[
{
@@ -1243,19 +1243,21 @@ describe(MetadataService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.PersonGenerateThumbnail,
data: { id: personStub.withName.id },
data: { id: person.id },
},
]);
});
it('should assign metadata face tags to existing persons', async () => {
const asset = AssetFactory.create();
const person = PersonFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.systemMetadata.get.mockResolvedValue({ metadata: { faces: { import: true } } });
mockReadTags(makeFaceTags({ Name: personStub.withName.name }));
mocks.person.getDistinctNames.mockResolvedValue([{ id: personStub.withName.id, name: personStub.withName.name }]);
mockReadTags(makeFaceTags({ Name: person.name }));
mocks.person.getDistinctNames.mockResolvedValue([{ id: person.id, name: person.name }]);
mocks.person.createAll.mockResolvedValue([]);
mocks.person.update.mockResolvedValue(personStub.withName);
mocks.person.update.mockResolvedValue(person);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.person.getDistinctNames).toHaveBeenCalledWith(asset.ownerId, { withHidden: true });
@@ -1265,7 +1267,7 @@ describe(MetadataService.name, () => {
{
id: 'random-uuid',
assetId: asset.id,
personId: personStub.withName.id,
personId: person.id,
imageHeight: 100,
imageWidth: 1000,
boundingBoxX1: 0,
@@ -1335,21 +1337,20 @@ describe(MetadataService.name, () => {
async ({ orientation, expected }) => {
const { imgW, imgH, x1, x2, y1, y2 } = expected;
const asset = AssetFactory.create();
const person = PersonFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.systemMetadata.get.mockResolvedValue({ metadata: { faces: { import: true } } });
mockReadTags(makeFaceTags({ Name: personStub.withName.name }, orientation));
mockReadTags(makeFaceTags({ Name: person.name }, orientation));
mocks.person.getDistinctNames.mockResolvedValue([]);
mocks.person.createAll.mockResolvedValue([personStub.withName.id]);
mocks.person.update.mockResolvedValue(personStub.withName);
mocks.person.createAll.mockResolvedValue([person.id]);
mocks.person.update.mockResolvedValue(person);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.person.getDistinctNames).toHaveBeenCalledWith(asset.ownerId, {
withHidden: true,
});
expect(mocks.person.createAll).toHaveBeenCalledWith([
expect.objectContaining({ name: personStub.withName.name }),
]);
expect(mocks.person.createAll).toHaveBeenCalledWith([expect.objectContaining({ name: person.name })]);
expect(mocks.person.refreshFaces).toHaveBeenCalledWith(
[
{
@@ -1373,7 +1374,7 @@ describe(MetadataService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.PersonGenerateThumbnail,
data: { id: personStub.withName.id },
data: { id: person.id },
},
]);
},
@@ -1422,6 +1423,20 @@ describe(MetadataService.name, () => {
);
});
it('should handle 0 as unrated -> null', async () => {
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mockReadTags({ Rating: 0 });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
rating: null,
}),
{ lockedPropertiesBehavior: 'skip' },
);
});
it('should handle valid negative rating value', async () => {
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
@@ -1779,6 +1794,28 @@ describe(MetadataService.name, () => {
'timeZone',
]);
});
it('should write rating', async () => {
const asset = factory.jobAssets.sidecarWrite();
asset.exifInfo.rating = 4;
mocks.assetJob.getLockedPropertiesForMetadataExtraction.mockResolvedValue(['rating']);
mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(asset);
await expect(sut.handleSidecarWrite({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.files[0].path, { Rating: 4 });
expect(mocks.asset.unlockProperties).toHaveBeenCalledWith(asset.id, ['rating']);
});
it('should write null rating as 0', async () => {
const asset = factory.jobAssets.sidecarWrite();
asset.exifInfo.rating = null;
mocks.assetJob.getLockedPropertiesForMetadataExtraction.mockResolvedValue(['rating']);
mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(asset);
await expect(sut.handleSidecarWrite({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.files[0].path, { Rating: 0 });
expect(mocks.asset.unlockProperties).toHaveBeenCalledWith(asset.id, ['rating']);
});
});
describe('firstDateTime', () => {

View File

@@ -301,7 +301,7 @@ export class MetadataService extends BaseService {
// comments
description: String(exifTags.ImageDescription || exifTags.Description || '').trim(),
profileDescription: exifTags.ProfileDescription || null,
rating: validateRange(exifTags.Rating, -1, 5),
rating: exifTags.Rating === 0 ? null : validateRange(exifTags.Rating, -1, 5),
// grouping
livePhotoCID: (exifTags.ContentIdentifier || exifTags.MediaGroupUUID) ?? null,
@@ -451,7 +451,7 @@ export class MetadataService extends BaseService {
dateTimeOriginal: asset.exifInfo.dateTimeOriginal as string | null,
latitude: asset.exifInfo.latitude,
longitude: asset.exifInfo.longitude,
rating: asset.exifInfo.rating,
rating: asset.exifInfo.rating ?? 0,
tags: asset.exifInfo.tags,
timeZone: asset.exifInfo.timeZone,
},

View File

@@ -1,6 +1,6 @@
import { BadRequestException, NotFoundException } from '@nestjs/common';
import { BulkIdErrorReason } from 'src/dtos/asset-ids.response.dto';
import { mapFaces, mapPerson, PersonResponseDto } from 'src/dtos/person.dto';
import { mapFaces, mapPerson } from 'src/dtos/person.dto';
import { AssetFileType, CacheControl, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum';
import { FaceSearchResult } from 'src/repositories/search.repository';
import { PersonService } from 'src/services/person.service';
@@ -11,25 +11,11 @@ import { AuthFactory } from 'test/factories/auth.factory';
import { PersonFactory } from 'test/factories/person.factory';
import { UserFactory } from 'test/factories/user.factory';
import { authStub } from 'test/fixtures/auth.stub';
import { personStub } from 'test/fixtures/person.stub';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { getAsDetectedFace, getForFacialRecognitionJob } from 'test/mappers';
import { newDate, newUuid } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
const responseDto: PersonResponseDto = {
id: 'person-1',
name: 'Person 1',
birthDate: null,
thumbnailPath: '/path/to/thumbnail.jpg',
isHidden: false,
updatedAt: expect.any(Date),
isFavorite: false,
color: expect.any(String),
};
const statistics = { assets: 3 };
describe(PersonService.name, () => {
let sut: PersonService;
let mocks: ServiceMocks;
@@ -44,60 +30,54 @@ describe(PersonService.name, () => {
describe('getAll', () => {
it('should get all hidden and visible people with thumbnails', async () => {
const auth = AuthFactory.create();
const [person, hiddenPerson] = [PersonFactory.create(), PersonFactory.create({ isHidden: true })];
mocks.person.getAllForUser.mockResolvedValue({
items: [personStub.withName, personStub.hidden],
items: [person, hiddenPerson],
hasNextPage: false,
});
mocks.person.getNumberOfPeople.mockResolvedValue({ total: 2, hidden: 1 });
await expect(sut.getAll(authStub.admin, { withHidden: true, page: 1, size: 10 })).resolves.toEqual({
await expect(sut.getAll(auth, { withHidden: true, page: 1, size: 10 })).resolves.toEqual({
hasNextPage: false,
total: 2,
hidden: 1,
people: [
responseDto,
{
id: 'person-1',
name: '',
birthDate: null,
thumbnailPath: '/path/to/thumbnail.jpg',
expect.objectContaining({ id: person.id, isHidden: false }),
expect.objectContaining({
id: hiddenPerson.id,
isHidden: true,
isFavorite: false,
updatedAt: expect.any(Date),
color: expect.any(String),
},
}),
],
});
expect(mocks.person.getAllForUser).toHaveBeenCalledWith({ skip: 0, take: 10 }, authStub.admin.user.id, {
expect(mocks.person.getAllForUser).toHaveBeenCalledWith({ skip: 0, take: 10 }, auth.user.id, {
minimumFaceCount: 3,
withHidden: true,
});
});
it('should get all visible people and favorites should be first in the array', async () => {
const auth = AuthFactory.create();
const [isFavorite, person] = [PersonFactory.create({ isFavorite: true }), PersonFactory.create()];
mocks.person.getAllForUser.mockResolvedValue({
items: [personStub.isFavorite, personStub.withName],
items: [isFavorite, person],
hasNextPage: false,
});
mocks.person.getNumberOfPeople.mockResolvedValue({ total: 2, hidden: 1 });
await expect(sut.getAll(authStub.admin, { withHidden: false, page: 1, size: 10 })).resolves.toEqual({
await expect(sut.getAll(auth, { withHidden: false, page: 1, size: 10 })).resolves.toEqual({
hasNextPage: false,
total: 2,
hidden: 1,
people: [
{
id: 'person-4',
name: personStub.isFavorite.name,
birthDate: null,
thumbnailPath: '/path/to/thumbnail.jpg',
isHidden: false,
expect.objectContaining({
id: isFavorite.id,
isFavorite: true,
updatedAt: expect.any(Date),
color: personStub.isFavorite.color,
},
responseDto,
}),
expect.objectContaining({ id: person.id, isFavorite: false }),
],
});
expect(mocks.person.getAllForUser).toHaveBeenCalledWith({ skip: 0, take: 10 }, authStub.admin.user.id, {
expect(mocks.person.getAllForUser).toHaveBeenCalledWith({ skip: 0, take: 10 }, auth.user.id, {
minimumFaceCount: 3,
withHidden: false,
});
@@ -106,71 +86,89 @@ describe(PersonService.name, () => {
describe('getById', () => {
it('should require person.read permission', async () => {
mocks.person.getById.mockResolvedValue(personStub.withName);
await expect(sut.getById(authStub.admin, 'person-1')).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
const auth = AuthFactory.create();
const person = PersonFactory.create();
mocks.person.getById.mockResolvedValue(person);
await expect(sut.getById(auth, person.id)).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should throw a bad request when person is not found', async () => {
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
await expect(sut.getById(authStub.admin, 'person-1')).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
const auth = AuthFactory.create();
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['unknown']));
await expect(sut.getById(auth, 'unknown')).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set(['unknown']));
});
it('should get a person by id', async () => {
mocks.person.getById.mockResolvedValue(personStub.withName);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
await expect(sut.getById(authStub.admin, 'person-1')).resolves.toEqual(responseDto);
expect(mocks.person.getById).toHaveBeenCalledWith('person-1');
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
const auth = AuthFactory.create();
const person = PersonFactory.create();
mocks.person.getById.mockResolvedValue(person);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
await expect(sut.getById(auth, person.id)).resolves.toEqual(expect.objectContaining({ id: person.id }));
expect(mocks.person.getById).toHaveBeenCalledWith(person.id);
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
});
describe('getThumbnail', () => {
it('should require person.read permission', async () => {
mocks.person.getById.mockResolvedValue(personStub.noName);
await expect(sut.getThumbnail(authStub.admin, 'person-1')).rejects.toBeInstanceOf(BadRequestException);
const auth = AuthFactory.create();
const person = PersonFactory.create();
mocks.person.getById.mockResolvedValue(person);
await expect(sut.getThumbnail(auth, person.id)).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.storage.createReadStream).not.toHaveBeenCalled();
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should throw an error when personId is invalid', async () => {
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
await expect(sut.getThumbnail(authStub.admin, 'person-1')).rejects.toBeInstanceOf(NotFoundException);
const auth = AuthFactory.create();
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['unknown']));
await expect(sut.getThumbnail(auth, 'unknown')).rejects.toBeInstanceOf(NotFoundException);
expect(mocks.storage.createReadStream).not.toHaveBeenCalled();
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set(['unknown']));
});
it('should throw an error when person has no thumbnail', async () => {
mocks.person.getById.mockResolvedValue(personStub.noThumbnail);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
await expect(sut.getThumbnail(authStub.admin, 'person-1')).rejects.toBeInstanceOf(NotFoundException);
const auth = AuthFactory.create();
const person = PersonFactory.create({ thumbnailPath: '' });
mocks.person.getById.mockResolvedValue(person);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
await expect(sut.getThumbnail(auth, person.id)).rejects.toBeInstanceOf(NotFoundException);
expect(mocks.storage.createReadStream).not.toHaveBeenCalled();
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should serve the thumbnail', async () => {
mocks.person.getById.mockResolvedValue(personStub.noName);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
await expect(sut.getThumbnail(authStub.admin, 'person-1')).resolves.toEqual(
const auth = AuthFactory.create();
const person = PersonFactory.create();
mocks.person.getById.mockResolvedValue(person);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
await expect(sut.getThumbnail(auth, person.id)).resolves.toEqual(
new ImmichFileResponse({
path: '/path/to/thumbnail.jpg',
path: person.thumbnailPath,
contentType: 'image/jpeg',
cacheControl: CacheControl.PrivateWithoutCache,
}),
);
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
});
describe('update', () => {
it('should require person.write permission', async () => {
mocks.person.getById.mockResolvedValue(personStub.noName);
await expect(sut.update(authStub.admin, 'person-1', { name: 'Person 1' })).rejects.toBeInstanceOf(
BadRequestException,
);
const auth = AuthFactory.create();
const person = PersonFactory.create();
mocks.person.getById.mockResolvedValue(person);
await expect(sut.update(auth, person.id, { name: 'Person 1' })).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should throw an error when personId is invalid', async () => {
@@ -183,86 +181,108 @@ describe(PersonService.name, () => {
});
it("should update a person's name", async () => {
mocks.person.update.mockResolvedValue(personStub.withName);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
const auth = AuthFactory.create();
const person = PersonFactory.create({ name: 'Person 1' });
await expect(sut.update(authStub.admin, 'person-1', { name: 'Person 1' })).resolves.toEqual(responseDto);
mocks.person.update.mockResolvedValue(person);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
expect(mocks.person.update).toHaveBeenCalledWith({ id: 'person-1', name: 'Person 1' });
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
await expect(sut.update(auth, person.id, { name: 'Person 1' })).resolves.toEqual(
expect.objectContaining({ id: person.id, name: 'Person 1' }),
);
expect(mocks.person.update).toHaveBeenCalledWith({ id: person.id, name: 'Person 1' });
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it("should update a person's date of birth", async () => {
mocks.person.update.mockResolvedValue(personStub.withBirthDate);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
const auth = AuthFactory.create();
const person = PersonFactory.create({ birthDate: new Date('1976-06-30') });
await expect(sut.update(authStub.admin, 'person-1', { birthDate: new Date('1976-06-30') })).resolves.toEqual({
id: 'person-1',
name: 'Person 1',
mocks.person.update.mockResolvedValue(person);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
await expect(sut.update(auth, person.id, { birthDate: new Date('1976-06-30') })).resolves.toEqual({
id: person.id,
name: person.name,
birthDate: '1976-06-30',
thumbnailPath: '/path/to/thumbnail.jpg',
thumbnailPath: person.thumbnailPath,
isHidden: false,
isFavorite: false,
updatedAt: expect.any(Date),
color: expect.any(String),
});
expect(mocks.person.update).toHaveBeenCalledWith({ id: 'person-1', birthDate: new Date('1976-06-30') });
expect(mocks.person.update).toHaveBeenCalledWith({ id: person.id, birthDate: new Date('1976-06-30') });
expect(mocks.job.queue).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should update a person visibility', async () => {
mocks.person.update.mockResolvedValue(personStub.withName);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
const auth = AuthFactory.create();
const person = PersonFactory.create({ isHidden: true });
await expect(sut.update(authStub.admin, 'person-1', { isHidden: false })).resolves.toEqual(responseDto);
mocks.person.update.mockResolvedValue(person);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
expect(mocks.person.update).toHaveBeenCalledWith({ id: 'person-1', isHidden: false });
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
await expect(sut.update(auth, person.id, { isHidden: true })).resolves.toEqual(
expect.objectContaining({ isHidden: true }),
);
expect(mocks.person.update).toHaveBeenCalledWith({ id: person.id, isHidden: true });
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should update a person favorite status', async () => {
mocks.person.update.mockResolvedValue(personStub.withName);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
const auth = AuthFactory.create();
const person = PersonFactory.create({ isFavorite: true });
await expect(sut.update(authStub.admin, 'person-1', { isFavorite: true })).resolves.toEqual(responseDto);
mocks.person.update.mockResolvedValue(person);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
expect(mocks.person.update).toHaveBeenCalledWith({ id: 'person-1', isFavorite: true });
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
await expect(sut.update(auth, person.id, { isFavorite: true })).resolves.toEqual(
expect.objectContaining({ isFavorite: true }),
);
expect(mocks.person.update).toHaveBeenCalledWith({ id: person.id, isFavorite: true });
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it("should update a person's thumbnailPath", async () => {
const face = AssetFaceFactory.create();
const auth = AuthFactory.create();
mocks.person.update.mockResolvedValue(personStub.withName);
const person = PersonFactory.create();
mocks.person.update.mockResolvedValue(person);
mocks.person.getForFeatureFaceUpdate.mockResolvedValue(face);
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([face.assetId]));
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
await expect(sut.update(auth, 'person-1', { featureFaceAssetId: face.assetId })).resolves.toEqual(responseDto);
await expect(sut.update(auth, person.id, { featureFaceAssetId: face.assetId })).resolves.toEqual(
expect.objectContaining({ id: person.id }),
);
expect(mocks.person.update).toHaveBeenCalledWith({ id: 'person-1', faceAssetId: face.id });
expect(mocks.person.update).toHaveBeenCalledWith({ id: person.id, faceAssetId: face.id });
expect(mocks.person.getForFeatureFaceUpdate).toHaveBeenCalledWith({
assetId: face.assetId,
personId: 'person-1',
personId: person.id,
});
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.PersonGenerateThumbnail,
data: { id: 'person-1' },
data: { id: person.id },
});
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should throw an error when the face feature assetId is invalid', async () => {
mocks.person.getById.mockResolvedValue(personStub.withName);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
const auth = AuthFactory.create();
const person = PersonFactory.create();
await expect(sut.update(authStub.admin, 'person-1', { featureFaceAssetId: '-1' })).rejects.toThrow(
BadRequestException,
);
mocks.person.getById.mockResolvedValue(person);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
await expect(sut.update(auth, person.id, { featureFaceAssetId: '-1' })).rejects.toThrow(BadRequestException);
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
});
@@ -283,36 +303,39 @@ describe(PersonService.name, () => {
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set());
await expect(
sut.reassignFaces(authStub.admin, personStub.noName.id, {
sut.reassignFaces(AuthFactory.create(), 'person-id', {
data: [{ personId: 'asset-face-1', assetId: '' }],
}),
).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.job.queue).not.toHaveBeenCalledWith();
expect(mocks.job.queueAll).not.toHaveBeenCalledWith();
});
it('should reassign a face', async () => {
const face = AssetFaceFactory.create();
const auth = AuthFactory.create();
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([personStub.withName.id]));
mocks.person.getById.mockResolvedValue(personStub.noName);
const person = PersonFactory.create();
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
mocks.person.getById.mockResolvedValue(person);
mocks.access.person.checkFaceOwnerAccess.mockResolvedValue(new Set([face.id]));
mocks.person.getFacesByIds.mockResolvedValue([face]);
mocks.person.reassignFace.mockResolvedValue(1);
mocks.person.getRandomFace.mockResolvedValue(AssetFaceFactory.create());
mocks.person.refreshFaces.mockResolvedValue();
mocks.person.reassignFace.mockResolvedValue(5);
mocks.person.update.mockResolvedValue(personStub.noName);
mocks.person.update.mockResolvedValue(person);
await expect(
sut.reassignFaces(auth, personStub.noName.id, {
data: [{ personId: personStub.withName.id, assetId: face.assetId }],
sut.reassignFaces(auth, person.id, {
data: [{ personId: person.id, assetId: face.assetId }],
}),
).resolves.toBeDefined();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.PersonGenerateThumbnail,
data: { id: personStub.newThumbnail.id },
data: { id: person.id },
},
]);
});
@@ -320,7 +343,7 @@ describe(PersonService.name, () => {
describe('handlePersonMigration', () => {
it('should not move person files', async () => {
await expect(sut.handlePersonMigration(personStub.noName)).resolves.toBe(JobStatus.Failed);
await expect(sut.handlePersonMigration(PersonFactory.create())).resolves.toBe(JobStatus.Failed);
});
});
@@ -347,12 +370,14 @@ describe(PersonService.name, () => {
describe('createNewFeaturePhoto', () => {
it('should change person feature photo', async () => {
const person = PersonFactory.create();
mocks.person.getRandomFace.mockResolvedValue(AssetFaceFactory.create());
await sut.createNewFeaturePhoto([personStub.newThumbnail.id]);
await sut.createNewFeaturePhoto([person.id]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.PersonGenerateThumbnail,
data: { id: personStub.newThumbnail.id },
data: { id: person.id },
},
]);
});
@@ -361,23 +386,22 @@ describe(PersonService.name, () => {
describe('reassignFacesById', () => {
it('should create a new person', async () => {
const face = AssetFaceFactory.create();
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([personStub.noName.id]));
const person = PersonFactory.create();
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
mocks.access.person.checkFaceOwnerAccess.mockResolvedValue(new Set([face.id]));
mocks.person.getFaceById.mockResolvedValue(face);
mocks.person.reassignFace.mockResolvedValue(1);
mocks.person.getById.mockResolvedValue(personStub.noName);
await expect(sut.reassignFacesById(AuthFactory.create(), personStub.noName.id, { id: face.id })).resolves.toEqual(
{
birthDate: personStub.noName.birthDate,
isHidden: personStub.noName.isHidden,
isFavorite: personStub.noName.isFavorite,
id: personStub.noName.id,
name: personStub.noName.name,
thumbnailPath: personStub.noName.thumbnailPath,
updatedAt: expect.any(Date),
color: personStub.noName.color,
},
);
mocks.person.getById.mockResolvedValue(person);
await expect(sut.reassignFacesById(AuthFactory.create(), person.id, { id: face.id })).resolves.toEqual({
birthDate: person.birthDate,
isHidden: person.isHidden,
isFavorite: person.isFavorite,
id: person.id,
name: person.name,
thumbnailPath: person.thumbnailPath,
updatedAt: expect.any(Date),
});
expect(mocks.job.queue).not.toHaveBeenCalledWith();
expect(mocks.job.queueAll).not.toHaveBeenCalledWith();
@@ -385,12 +409,14 @@ describe(PersonService.name, () => {
it('should fail if user has not the correct permissions on the asset', async () => {
const face = AssetFaceFactory.create();
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([personStub.noName.id]));
const person = PersonFactory.create();
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
mocks.person.getFaceById.mockResolvedValue(face);
mocks.person.reassignFace.mockResolvedValue(1);
mocks.person.getById.mockResolvedValue(personStub.noName);
mocks.person.getById.mockResolvedValue(person);
await expect(
sut.reassignFacesById(AuthFactory.create(), personStub.noName.id, {
sut.reassignFacesById(AuthFactory.create(), person.id, {
id: face.id,
}),
).rejects.toBeInstanceOf(BadRequestException);
@@ -402,22 +428,25 @@ describe(PersonService.name, () => {
describe('createPerson', () => {
it('should create a new person', async () => {
mocks.person.create.mockResolvedValue(personStub.primaryPerson);
const auth = AuthFactory.create();
await expect(sut.create(authStub.admin, {})).resolves.toBeDefined();
mocks.person.create.mockResolvedValue(PersonFactory.create());
await expect(sut.create(auth, {})).resolves.toBeDefined();
expect(mocks.person.create).toHaveBeenCalledWith({ ownerId: authStub.admin.user.id });
expect(mocks.person.create).toHaveBeenCalledWith({ ownerId: auth.user.id });
});
});
describe('handlePersonCleanup', () => {
it('should delete people without faces', async () => {
mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.noName]);
const person = PersonFactory.create();
mocks.person.getAllWithoutFaces.mockResolvedValue([person]);
await sut.handlePersonCleanup();
expect(mocks.person.delete).toHaveBeenCalledWith([personStub.noName.id]);
expect(mocks.storage.unlink).toHaveBeenCalledWith(personStub.noName.thumbnailPath);
expect(mocks.person.delete).toHaveBeenCalledWith([person.id]);
expect(mocks.storage.unlink).toHaveBeenCalledWith(person.thumbnailPath);
});
});
@@ -449,15 +478,17 @@ describe(PersonService.name, () => {
it('should queue all assets', async () => {
const asset = AssetFactory.create();
const person = PersonFactory.create();
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([asset]));
mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.withName]);
mocks.person.getAllWithoutFaces.mockResolvedValue([person]);
await sut.handleQueueDetectFaces({ force: true });
expect(mocks.person.deleteFaces).toHaveBeenCalledWith({ sourceType: SourceType.MachineLearning });
expect(mocks.person.delete).toHaveBeenCalledWith([personStub.withName.id]);
expect(mocks.person.delete).toHaveBeenCalledWith([person.id]);
expect(mocks.person.vacuum).toHaveBeenCalledWith({ reindexVectors: true });
expect(mocks.storage.unlink).toHaveBeenCalledWith(personStub.withName.thumbnailPath);
expect(mocks.storage.unlink).toHaveBeenCalledWith(person.thumbnailPath);
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
@@ -490,10 +521,12 @@ describe(PersonService.name, () => {
it('should delete existing people and faces if forced', async () => {
const asset = AssetFactory.create();
const face = AssetFaceFactory.from().person().build();
mocks.person.getAll.mockReturnValue(makeStream([face.person!, personStub.randomPerson]));
const person = PersonFactory.create();
mocks.person.getAll.mockReturnValue(makeStream([face.person!, person]));
mocks.person.getAllFaces.mockReturnValue(makeStream([face]));
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([asset]));
mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.randomPerson]);
mocks.person.getAllWithoutFaces.mockResolvedValue([person]);
mocks.person.deleteFaces.mockResolvedValue();
await sut.handleQueueDetectFaces({ force: true });
@@ -505,8 +538,8 @@ describe(PersonService.name, () => {
data: { id: asset.id },
},
]);
expect(mocks.person.delete).toHaveBeenCalledWith([personStub.randomPerson.id]);
expect(mocks.storage.unlink).toHaveBeenCalledWith(personStub.randomPerson.thumbnailPath);
expect(mocks.person.delete).toHaveBeenCalledWith([person.id]);
expect(mocks.storage.unlink).toHaveBeenCalledWith(person.thumbnailPath);
expect(mocks.person.vacuum).toHaveBeenCalledWith({ reindexVectors: true });
});
});
@@ -661,6 +694,8 @@ describe(PersonService.name, () => {
it('should delete existing people if forced', async () => {
const face = AssetFaceFactory.from().person().build();
const person = PersonFactory.create();
mocks.job.getJobCounts.mockResolvedValue({
active: 1,
waiting: 0,
@@ -669,9 +704,9 @@ describe(PersonService.name, () => {
failed: 0,
delayed: 0,
});
mocks.person.getAll.mockReturnValue(makeStream([face.person!, personStub.randomPerson]));
mocks.person.getAll.mockReturnValue(makeStream([face.person!, person]));
mocks.person.getAllFaces.mockReturnValue(makeStream([face]));
mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.randomPerson]);
mocks.person.getAllWithoutFaces.mockResolvedValue([person]);
mocks.person.unassignFaces.mockResolvedValue();
await sut.handleQueueRecognizeFaces({ force: true });
@@ -684,8 +719,8 @@ describe(PersonService.name, () => {
data: { id: face.id, deferred: false },
},
]);
expect(mocks.person.delete).toHaveBeenCalledWith([personStub.randomPerson.id]);
expect(mocks.storage.unlink).toHaveBeenCalledWith(personStub.randomPerson.thumbnailPath);
expect(mocks.person.delete).toHaveBeenCalledWith([person.id]);
expect(mocks.storage.unlink).toHaveBeenCalledWith(person.thumbnailPath);
expect(mocks.person.vacuum).toHaveBeenCalledWith({ reindexVectors: false });
});
});
@@ -1059,59 +1094,71 @@ describe(PersonService.name, () => {
describe('mergePerson', () => {
it('should require person.write and person.merge permission', async () => {
mocks.person.getById.mockResolvedValueOnce(personStub.primaryPerson);
mocks.person.getById.mockResolvedValueOnce(personStub.mergePerson);
const auth = AuthFactory.create();
const [person, mergePerson] = [PersonFactory.create(), PersonFactory.create()];
await expect(sut.mergePerson(authStub.admin, 'person-1', { ids: ['person-2'] })).rejects.toBeInstanceOf(
mocks.person.getById.mockResolvedValueOnce(person);
mocks.person.getById.mockResolvedValueOnce(mergePerson);
await expect(sut.mergePerson(auth, person.id, { ids: [mergePerson.id] })).rejects.toBeInstanceOf(
BadRequestException,
);
expect(mocks.person.reassignFaces).not.toHaveBeenCalled();
expect(mocks.person.delete).not.toHaveBeenCalled();
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should merge two people without smart merge', async () => {
mocks.person.getById.mockResolvedValueOnce(personStub.primaryPerson);
mocks.person.getById.mockResolvedValueOnce(personStub.mergePerson);
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-1']));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-2']));
const auth = AuthFactory.create();
const [person, mergePerson] = [PersonFactory.create(), PersonFactory.create()];
await expect(sut.mergePerson(authStub.admin, 'person-1', { ids: ['person-2'] })).resolves.toEqual([
{ id: 'person-2', success: true },
mocks.person.getById.mockResolvedValueOnce(person);
mocks.person.getById.mockResolvedValueOnce(mergePerson);
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set([person.id]));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set([mergePerson.id]));
await expect(sut.mergePerson(auth, person.id, { ids: [mergePerson.id] })).resolves.toEqual([
{ id: mergePerson.id, success: true },
]);
expect(mocks.person.reassignFaces).toHaveBeenCalledWith({
newPersonId: personStub.primaryPerson.id,
oldPersonId: personStub.mergePerson.id,
newPersonId: person.id,
oldPersonId: mergePerson.id,
});
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should merge two people with smart merge', async () => {
mocks.person.getById.mockResolvedValueOnce(personStub.randomPerson);
mocks.person.getById.mockResolvedValueOnce(personStub.primaryPerson);
mocks.person.update.mockResolvedValue({ ...personStub.randomPerson, name: personStub.primaryPerson.name });
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-3']));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-1']));
const auth = AuthFactory.create();
const [person, mergePerson] = [
PersonFactory.create({ name: undefined }),
PersonFactory.create({ name: 'Merge person' }),
];
await expect(sut.mergePerson(authStub.admin, 'person-3', { ids: ['person-1'] })).resolves.toEqual([
{ id: 'person-1', success: true },
mocks.person.getById.mockResolvedValueOnce(person);
mocks.person.getById.mockResolvedValueOnce(mergePerson);
mocks.person.update.mockResolvedValue({ ...person, name: mergePerson.name });
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set([person.id]));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set([mergePerson.id]));
await expect(sut.mergePerson(auth, person.id, { ids: [mergePerson.id] })).resolves.toEqual([
{ id: mergePerson.id, success: true },
]);
expect(mocks.person.reassignFaces).toHaveBeenCalledWith({
newPersonId: personStub.randomPerson.id,
oldPersonId: personStub.primaryPerson.id,
newPersonId: person.id,
oldPersonId: mergePerson.id,
});
expect(mocks.person.update).toHaveBeenCalledWith({
id: personStub.randomPerson.id,
name: personStub.primaryPerson.name,
id: person.id,
name: mergePerson.name,
});
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should throw an error when the primary person is not found', async () => {
@@ -1126,48 +1173,60 @@ describe(PersonService.name, () => {
});
it('should handle invalid merge ids', async () => {
mocks.person.getById.mockResolvedValueOnce(personStub.primaryPerson);
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-1']));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-2']));
const auth = AuthFactory.create();
const person = PersonFactory.create();
await expect(sut.mergePerson(authStub.admin, 'person-1', { ids: ['person-2'] })).resolves.toEqual([
{ id: 'person-2', success: false, error: BulkIdErrorReason.NOT_FOUND },
mocks.person.getById.mockResolvedValueOnce(person);
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set([person.id]));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['unknown']));
await expect(sut.mergePerson(auth, person.id, { ids: ['unknown'] })).resolves.toEqual([
{ id: 'unknown', success: false, error: BulkIdErrorReason.NOT_FOUND },
]);
expect(mocks.person.reassignFaces).not.toHaveBeenCalled();
expect(mocks.person.delete).not.toHaveBeenCalled();
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should handle an error reassigning faces', async () => {
mocks.person.getById.mockResolvedValueOnce(personStub.primaryPerson);
mocks.person.getById.mockResolvedValueOnce(personStub.mergePerson);
mocks.person.reassignFaces.mockRejectedValue(new Error('update failed'));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-1']));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-2']));
const auth = AuthFactory.create();
const [person, mergePerson] = [PersonFactory.create(), PersonFactory.create()];
await expect(sut.mergePerson(authStub.admin, 'person-1', { ids: ['person-2'] })).resolves.toEqual([
{ id: 'person-2', success: false, error: BulkIdErrorReason.UNKNOWN },
mocks.person.getById.mockResolvedValueOnce(person);
mocks.person.getById.mockResolvedValueOnce(mergePerson);
mocks.person.reassignFaces.mockRejectedValue(new Error('update failed'));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set([person.id]));
mocks.access.person.checkOwnerAccess.mockResolvedValueOnce(new Set([mergePerson.id]));
await expect(sut.mergePerson(auth, person.id, { ids: [mergePerson.id] })).resolves.toEqual([
{ id: mergePerson.id, success: false, error: BulkIdErrorReason.UNKNOWN },
]);
expect(mocks.person.delete).not.toHaveBeenCalled();
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
});
describe('getStatistics', () => {
it('should get correct number of person', async () => {
mocks.person.getById.mockResolvedValue(personStub.primaryPerson);
mocks.person.getStatistics.mockResolvedValue(statistics);
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set(['person-1']));
await expect(sut.getStatistics(authStub.admin, 'person-1')).resolves.toEqual({ assets: 3 });
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
const auth = AuthFactory.create();
const person = PersonFactory.create();
mocks.person.getById.mockResolvedValue(person);
mocks.person.getStatistics.mockResolvedValue({ assets: 3 });
mocks.access.person.checkOwnerAccess.mockResolvedValue(new Set([person.id]));
await expect(sut.getStatistics(auth, person.id)).resolves.toEqual({ assets: 3 });
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
it('should require person.read permission', async () => {
mocks.person.getById.mockResolvedValue(personStub.primaryPerson);
await expect(sut.getStatistics(authStub.admin, 'person-1')).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
const auth = AuthFactory.create();
const person = PersonFactory.create();
mocks.person.getById.mockResolvedValue(person);
await expect(sut.getStatistics(auth, person.id)).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(auth.user.id, new Set([person.id]));
});
});

View File

@@ -595,7 +595,7 @@ export class PersonService extends BaseService {
update.birthDate = mergePerson.birthDate;
}
if (Object.keys(update).length > 0) {
if (Object.keys(update).length > 1) {
primaryPerson = await this.personRepository.update(update);
}

View File

@@ -23,7 +23,7 @@ describe(QueueService.name, () => {
it('should update concurrency', () => {
sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });
expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(19);
expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(18);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FacialRecognition, 1);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DuplicateDetection, 1);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BackgroundTask, 5);
@@ -77,7 +77,6 @@ describe(QueueService.name, () => {
[QueueName.BackupDatabase]: expected,
[QueueName.Ocr]: expected,
[QueueName.Workflow]: expected,
[QueueName.IntegrityCheck]: expected,
[QueueName.Editor]: expected,
});
});

View File

@@ -5,7 +5,6 @@ import { SearchService } from 'src/services/search.service';
import { AssetFactory } from 'test/factories/asset.factory';
import { AuthFactory } from 'test/factories/auth.factory';
import { authStub } from 'test/fixtures/auth.stub';
import { personStub } from 'test/fixtures/person.stub';
import { newTestService, ServiceMocks } from 'test/utils';
import { beforeEach, vitest } from 'vitest';
@@ -26,17 +25,18 @@ describe(SearchService.name, () => {
describe('searchPerson', () => {
it('should pass options to search', async () => {
const { name } = personStub.withName;
const auth = AuthFactory.create();
const name = 'foo';
mocks.person.getByName.mockResolvedValue([]);
await sut.searchPerson(authStub.user1, { name, withHidden: false });
await sut.searchPerson(auth, { name, withHidden: false });
expect(mocks.person.getByName).toHaveBeenCalledWith(authStub.user1.user.id, name, { withHidden: false });
expect(mocks.person.getByName).toHaveBeenCalledWith(auth.user.id, name, { withHidden: false });
await sut.searchPerson(authStub.user1, { name, withHidden: true });
await sut.searchPerson(auth, { name, withHidden: true });
expect(mocks.person.getByName).toHaveBeenCalledWith(authStub.user1.user.id, name, { withHidden: true });
expect(mocks.person.getByName).toHaveBeenCalledWith(auth.user.id, name, { withHidden: true });
});
});

View File

@@ -89,6 +89,7 @@ export const SYNC_TYPES_ORDER = [
SyncRequestType.AssetFacesV2,
SyncRequestType.UserMetadataV1,
SyncRequestType.AssetMetadataV1,
SyncRequestType.AssetEditsV1,
];
const throwSessionRequired = () => {
@@ -175,6 +176,7 @@ export class SyncService extends BaseService {
[SyncRequestType.PartnersV1]: () => this.syncPartnersV1(options, response, checkpointMap),
[SyncRequestType.AssetsV1]: () => this.syncAssetsV1(options, response, checkpointMap),
[SyncRequestType.AssetExifsV1]: () => this.syncAssetExifsV1(options, response, checkpointMap),
[SyncRequestType.AssetEditsV1]: () => this.syncAssetEditsV1(options, response, checkpointMap),
[SyncRequestType.PartnerAssetsV1]: () => this.syncPartnerAssetsV1(options, response, checkpointMap, session.id),
[SyncRequestType.AssetMetadataV1]: () => this.syncAssetMetadataV1(options, response, checkpointMap, auth),
[SyncRequestType.PartnerAssetExifsV1]: () =>
@@ -215,6 +217,7 @@ export class SyncService extends BaseService {
await this.syncRepository.asset.cleanupAuditTable(pruneThreshold);
await this.syncRepository.assetFace.cleanupAuditTable(pruneThreshold);
await this.syncRepository.assetMetadata.cleanupAuditTable(pruneThreshold);
await this.syncRepository.assetEdit.cleanupAuditTable(pruneThreshold);
await this.syncRepository.memory.cleanupAuditTable(pruneThreshold);
await this.syncRepository.memoryToAsset.cleanupAuditTable(pruneThreshold);
await this.syncRepository.partner.cleanupAuditTable(pruneThreshold);
@@ -352,6 +355,21 @@ export class SyncService extends BaseService {
}
}
private async syncAssetEditsV1(options: SyncQueryOptions, response: Writable, checkpointMap: CheckpointMap) {
const deleteType = SyncEntityType.AssetEditDeleteV1;
const deletes = this.syncRepository.assetEdit.getDeletes({ ...options, ack: checkpointMap[deleteType] });
for await (const { id, ...data } of deletes) {
send(response, { type: deleteType, ids: [id], data });
}
const upsertType = SyncEntityType.AssetEditV1;
const upserts = this.syncRepository.assetEdit.getUpserts({ ...options, ack: checkpointMap[upsertType] });
for await (const { updateId, ...data } of upserts) {
send(response, { type: upsertType, ids: [updateId], data });
}
}
private async syncPartnerAssetExifsV1(
options: SyncQueryOptions,
response: Writable,

View File

@@ -41,7 +41,6 @@ const updatedConfig = Object.freeze<SystemConfig>({
[QueueName.Notification]: { concurrency: 5 },
[QueueName.Ocr]: { concurrency: 1 },
[QueueName.Workflow]: { concurrency: 5 },
[QueueName.IntegrityCheck]: { concurrency: 1 },
[QueueName.Editor]: { concurrency: 2 },
},
backup: {
@@ -74,22 +73,6 @@ const updatedConfig = Object.freeze<SystemConfig>({
accelDecode: false,
tonemap: ToneMapping.Hable,
},
integrityChecks: {
untrackedFiles: {
enabled: true,
cronExpression: '0 03 * * *',
},
missingFiles: {
enabled: true,
cronExpression: '0 03 * * *',
},
checksumFiles: {
enabled: true,
cronExpression: '0 03 * * *',
timeLimit: 60 * 60 * 1000,
percentageLimit: 1,
},
},
logging: {
enabled: true,
level: LogLevel.Log,

View File

@@ -10,7 +10,6 @@ import {
AssetType,
ExifOrientation,
ImageFormat,
IntegrityReportType,
JobName,
MemoryType,
PluginTriggerType,
@@ -277,43 +276,6 @@ export interface IWorkflowJob<T extends PluginTriggerType = PluginTriggerType> {
event: WorkflowData[T];
}
export interface IIntegrityJob {
refreshOnly?: boolean;
}
export interface IIntegrityDeleteReportTypeJob {
type?: IntegrityReportType;
}
export interface IIntegrityDeleteReportsJob {
reports: {
id: string;
assetId: string | null;
fileAssetId: string | null;
path: string;
}[];
}
export interface IIntegrityUntrackedFilesJob {
type: 'asset' | 'asset_file';
paths: string[];
}
export interface IIntegrityMissingFilesJob {
items: ({ path: string; reportId: string | null } & (
| { assetId: string; fileAssetId: null }
| { assetId: null; fileAssetId: string }
))[];
}
export interface IIntegrityPathWithReportJob {
items: { path: string; reportId: string | null }[];
}
export interface IIntegrityPathWithChecksumJob {
items: { path: string; reportId: string | null; checksum?: string | null }[];
}
export interface JobCounts {
active: number;
completed: number;
@@ -425,18 +387,6 @@ export type JobItem =
// Workflow
| { name: JobName.WorkflowRun; data: IWorkflowJob }
// Integrity
| { name: JobName.IntegrityUntrackedFilesQueueAll; data?: IIntegrityJob }
| { name: JobName.IntegrityUntrackedFiles; data: IIntegrityUntrackedFilesJob }
| { name: JobName.IntegrityUntrackedFilesRefresh; data: IIntegrityPathWithReportJob }
| { name: JobName.IntegrityMissingFilesQueueAll; data?: IIntegrityJob }
| { name: JobName.IntegrityMissingFiles; data: IIntegrityPathWithReportJob }
| { name: JobName.IntegrityMissingFilesRefresh; data: IIntegrityPathWithReportJob }
| { name: JobName.IntegrityChecksumFiles; data?: IIntegrityJob }
| { name: JobName.IntegrityChecksumFilesRefresh; data?: IIntegrityPathWithChecksumJob }
| { name: JobName.IntegrityDeleteReportType; data: IIntegrityDeleteReportTypeJob }
| { name: JobName.IntegrityDeleteReports; data: IIntegrityDeleteReportsJob }
// Editor
| { name: JobName.AssetEditThumbnailGeneration; data: IEntityJob };
@@ -541,7 +491,6 @@ export interface SystemMetadata extends Record<SystemMetadataKey, Record<string,
[SystemMetadataKey.SystemFlags]: DeepPartial<SystemFlags>;
[SystemMetadataKey.VersionCheckState]: VersionCheckMetadata;
[SystemMetadataKey.MemoriesState]: MemoriesState;
[SystemMetadataKey.IntegrityChecksumCheckpoint]: { date?: string };
}
export type UserPreferences = {

View File

@@ -34,7 +34,6 @@ import { CronJob } from 'cron';
import { DateTime } from 'luxon';
import sanitize from 'sanitize-filename';
import { Property, PropertyOptions } from 'src/decorators';
import { IntegrityReportType } from 'src/enum';
import { isIP, isIPRange } from 'validator';
@Injectable()
@@ -133,13 +132,6 @@ export class UUIDParamDto {
id!: string;
}
export class UUIDv7ParamDto {
@IsNotEmpty()
@IsUUID('7')
@ApiProperty({ format: 'uuid' })
id!: string;
}
export class UUIDAssetIDParamDto {
@ValidateUUID()
id!: string;
@@ -148,12 +140,6 @@ export class UUIDAssetIDParamDto {
assetId!: string;
}
export class IntegrityReportTypeParamDto {
@IsNotEmpty()
@ApiProperty({ enum: IntegrityReportType, enumName: 'IntegrityReportType' })
type!: IntegrityReportType;
}
export class FilenameParamDto {
@IsNotEmpty()
@IsString()

View File

@@ -4,7 +4,7 @@ import { AssetEditTable } from 'src/schema/tables/asset-edit.table';
import { AssetFactory } from 'test/factories/asset.factory';
import { build } from 'test/factories/builder.factory';
import { AssetEditLike, AssetLike, FactoryBuilder } from 'test/factories/types';
import { newUuid } from 'test/small.factory';
import { newDate, newUuid } from 'test/small.factory';
export class AssetEditFactory {
private constructor(private readonly value: Selectable<AssetEditTable>) {}
@@ -15,6 +15,7 @@ export class AssetEditFactory {
static from(dto: AssetEditLike = {}) {
const id = dto.id ?? newUuid();
const updateId = dto.updateId ?? newUuid();
return new AssetEditFactory({
id,
@@ -22,6 +23,8 @@ export class AssetEditFactory {
action: AssetEditAction.Crop,
parameters: { x: 5, y: 6, width: 200, height: 100 },
sequence: 1,
updateId,
updatedAt: newDate(),
...dto,
});
}

View File

@@ -2,171 +2,6 @@ import { AssetFileType, AssetType } from 'src/enum';
import { AssetFileFactory } from 'test/factories/asset-file.factory';
import { userStub } from 'test/fixtures/user.stub';
const updateId = '0d1173e3-4d80-4d76-b41e-57d56de21125';
export const personStub = {
noName: Object.freeze({
id: 'person-1',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: '',
birthDate: null,
thumbnailPath: '/path/to/thumbnail.jpg',
faces: [],
faceAssetId: null,
faceAsset: null,
isHidden: false,
isFavorite: false,
color: 'red',
}),
hidden: Object.freeze({
id: 'person-1',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: '',
birthDate: null,
thumbnailPath: '/path/to/thumbnail.jpg',
faces: [],
faceAssetId: null,
faceAsset: null,
isHidden: true,
isFavorite: false,
color: 'red',
}),
withName: Object.freeze({
id: 'person-1',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: 'Person 1',
birthDate: null,
thumbnailPath: '/path/to/thumbnail.jpg',
faces: [],
faceAssetId: 'assetFaceId',
faceAsset: null,
isHidden: false,
isFavorite: false,
color: 'red',
}),
withBirthDate: Object.freeze({
id: 'person-1',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: 'Person 1',
birthDate: new Date('1976-06-30'),
thumbnailPath: '/path/to/thumbnail.jpg',
faces: [],
faceAssetId: null,
faceAsset: null,
isHidden: false,
isFavorite: false,
color: 'red',
}),
noThumbnail: Object.freeze({
id: 'person-1',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: '',
birthDate: null,
thumbnailPath: '',
faces: [],
faceAssetId: null,
faceAsset: null,
isHidden: false,
isFavorite: false,
color: 'red',
}),
newThumbnail: Object.freeze({
id: 'person-1',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: '',
birthDate: null,
thumbnailPath: '/new/path/to/thumbnail.jpg',
faces: [],
faceAssetId: 'asset-id',
faceAsset: null,
isHidden: false,
isFavorite: false,
color: 'red',
}),
primaryPerson: Object.freeze({
id: 'person-1',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: 'Person 1',
birthDate: null,
thumbnailPath: '/path/to/thumbnail',
faces: [],
faceAssetId: null,
faceAsset: null,
isHidden: false,
isFavorite: false,
color: 'red',
}),
mergePerson: Object.freeze({
id: 'person-2',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: 'Person 2',
birthDate: null,
thumbnailPath: '/path/to/thumbnail',
faces: [],
faceAssetId: null,
faceAsset: null,
isHidden: false,
isFavorite: false,
color: 'red',
}),
randomPerson: Object.freeze({
id: 'person-3',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: '',
birthDate: null,
thumbnailPath: '/path/to/thumbnail',
faces: [],
faceAssetId: null,
faceAsset: null,
isHidden: false,
isFavorite: false,
color: 'red',
}),
isFavorite: Object.freeze({
id: 'person-4',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
updateId,
ownerId: userStub.admin.id,
name: 'Person 1',
birthDate: null,
thumbnailPath: '/path/to/thumbnail.jpg',
faces: [],
faceAssetId: 'assetFaceId',
faceAsset: null,
isHidden: false,
isFavorite: true,
color: 'red',
}),
};
export const personThumbnailStub = {
newThumbnailStart: Object.freeze({
ownerId: userStub.admin.id,

View File

@@ -28,7 +28,6 @@ import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { IntegrityRepository } from 'src/repositories/integrity.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
@@ -394,7 +393,6 @@ const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
case AssetRepository:
case AssetEditRepository:
case AssetJobRepository:
case IntegrityRepository:
case MemoryRepository:
case NotificationRepository:
case OcrRepository:

View File

@@ -1,3 +1,5 @@
import { AssetEditAction } from 'src/dtos/editing.dto';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { UserRepository } from 'src/repositories/user.repository';
@@ -45,6 +47,27 @@ describe('audit', () => {
});
});
describe('asset_edit_audit', () => {
it('should not cascade asset deletes to asset_edit_audit', async () => {
const assetEditRepo = ctx.get(AssetEditRepository);
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await assetEditRepo.replaceAll(asset.id, [
{
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
},
]);
await ctx.database.deleteFrom('asset').where('id', '=', asset.id).execute();
await expect(
ctx.database.selectFrom('asset_edit_audit').select(['id']).where('assetId', '=', asset.id).execute(),
).resolves.toHaveLength(0);
});
});
describe('assets_audit', () => {
it('should not cascade user deletes to assets_audit', async () => {
const userRepo = ctx.get(UserRepository);

View File

@@ -1,999 +0,0 @@
import { Kysely } from 'kysely';
import { createHash, randomUUID } from 'node:crypto';
import { Readable } from 'node:stream';
import { text } from 'node:stream/consumers';
import { StorageCore } from 'src/cores/storage.core';
import { AssetFileType, IntegrityReportType, JobName, JobStatus } from 'src/enum';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { IntegrityRepository } from 'src/repositories/integrity.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { DB } from 'src/schema';
import { IntegrityService } from 'src/services/integrity.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB, makeStream } from 'test/utils';
let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(IntegrityService, {
database: db || defaultDatabase,
real: [IntegrityRepository, AssetRepository, ConfigRepository, SystemMetadataRepository],
mock: [LoggingRepository, EventRepository, StorageRepository, JobRepository],
});
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
StorageCore.setMediaLocation('/path/to/file');
});
afterAll(() => {
StorageCore.reset();
});
describe(IntegrityService.name, () => {
beforeEach(async () => {
await defaultDatabase.deleteFrom('asset_file').execute();
await defaultDatabase.deleteFrom('asset').execute();
await defaultDatabase.deleteFrom('integrity_report').execute();
});
it('should work', () => {
const { sut } = setup();
expect(sut).toBeDefined();
});
describe('getIntegrityReportSummary', () => {
it('gets summary', async () => {
const { sut } = setup();
await expect(sut.getIntegrityReportSummary()).resolves.toEqual({
checksum_mismatch: 0,
missing_file: 0,
untracked_file: 0,
});
});
});
describe('getIntegrityReport', () => {
it('gets report', async () => {
const { sut } = setup();
await expect(sut.getIntegrityReport({ type: IntegrityReportType.ChecksumFail })).resolves.toEqual({
items: [],
nextCursor: undefined,
});
});
});
describe('getIntegrityReportCsv', () => {
it('gets report as csv', async () => {
const { sut, ctx } = setup();
const { id } = await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/file',
});
await expect(text(sut.getIntegrityReportCsv(IntegrityReportType.ChecksumFail))).resolves.toMatchInlineSnapshot(`
"id,type,assetId,fileAssetId,path
${id},checksum_mismatch,null,null,"/path/to/file"
"
`);
});
});
describe('getIntegrityReportFile', () => {
it('gets report file', async () => {
const { sut, ctx } = setup();
const { id } = await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/file',
});
await expect(sut.getIntegrityReportFile(id)).resolves.toEqual({
path: '/path/to/file',
fileName: 'file',
contentType: 'application/octet-stream',
cacheControl: 'private_without_cache',
});
});
});
describe('deleteIntegrityReport', () => {
it('deletes asset if one is present', async () => {
const { sut, ctx } = setup();
const events = ctx.getMock(EventRepository);
events.emit.mockResolvedValue(void 0);
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId },
} = await ctx.newAsset({ ownerId });
const { id } = await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/file',
assetId,
});
await sut.deleteIntegrityReport(ownerId, id);
await expect(ctx.get(AssetRepository).getById(assetId)).resolves.toEqual(
expect.objectContaining({
status: 'trashed',
}),
);
expect(events.emit).toHaveBeenCalledWith('AssetTrashAll', {
assetIds: [assetId],
userId: ownerId,
});
await expect(sut.getIntegrityReport({ type: IntegrityReportType.ChecksumFail })).resolves.toEqual({
items: [],
nextCursor: undefined,
});
});
it('deletes file asset if one is present', async () => {
const { sut, ctx } = setup();
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId },
} = await ctx.newAsset({ ownerId });
const fileAssetId = randomUUID();
await ctx.newAssetFile({ id: fileAssetId, assetId, type: AssetFileType.Thumbnail, path: '/path/to/file' });
const { id } = await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/file',
fileAssetId,
});
await sut.deleteIntegrityReport('userId', id);
await expect(ctx.get(AssetRepository).getForThumbnail(assetId, AssetFileType.Thumbnail, false)).resolves.toEqual(
expect.objectContaining({
path: null,
}),
);
});
it('deletes untracked file', async () => {
const { sut, ctx } = setup();
const storage = ctx.getMock(StorageRepository);
storage.unlink.mockResolvedValue(void 0);
const {
result: { id: userId },
} = await ctx.newUser();
const { id } = await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/file',
});
await sut.deleteIntegrityReport(userId, id);
expect(storage.unlink).toHaveBeenCalledWith('/path/to/file');
});
});
describe('handleUntrackedFilesQueueAll', () => {
it('should succeed', async () => {
const { sut, ctx } = setup();
const storage = ctx.getMock(StorageRepository);
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
storage.walk.mockImplementation(() => makeStream([['/path/to/file', '/path/to/file2'], ['/path/to/batch2']]));
await expect(sut.handleUntrackedFilesQueueAll({ refreshOnly: false })).resolves.toBe(JobStatus.Success);
});
it('queues jobs for all detected files', async () => {
const { sut, ctx } = setup();
const storage = ctx.getMock(StorageRepository);
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
storage.walk.mockReturnValueOnce(makeStream([['/path/to/file', '/path/to/file2'], ['/path/to/batch2']]));
storage.walk.mockReturnValueOnce(makeStream([['/path/to/file3', '/path/to/file4'], ['/path/to/batch4']]));
await sut.handleUntrackedFilesQueueAll({ refreshOnly: false });
expect(job.queue).toBeCalledTimes(4);
expect(job.queue).toBeCalledWith({
name: JobName.IntegrityUntrackedFiles,
data: {
type: 'asset',
paths: expect.arrayContaining(['/path/to/file']),
},
});
expect(job.queue).toBeCalledWith({
name: JobName.IntegrityUntrackedFiles,
data: {
type: 'asset_file',
paths: expect.arrayContaining(['/path/to/file3']),
},
});
});
it('queues jobs to refresh reports', async () => {
const { sut, ctx } = setup();
const storage = ctx.getMock(StorageRepository);
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
storage.walk.mockImplementation(() => makeStream([['/path/to/file', '/path/to/file2'], ['/path/to/batch2']]));
const { id } = await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.UntrackedFile,
path: '/path/to/file',
});
await sut.handleUntrackedFilesQueueAll({ refreshOnly: true });
expect(job.queue).toBeCalledTimes(1);
expect(job.queue).toBeCalledWith({
name: JobName.IntegrityUntrackedFilesRefresh,
data: {
items: expect.arrayContaining([
{
path: '/path/to/file',
reportId: id,
},
]),
},
});
});
});
describe('handleUntrackedFiles', () => {
it('should detect untracked asset files', async () => {
const { sut, ctx } = setup();
const {
result: { id: ownerId },
} = await ctx.newUser();
await ctx.newAsset({ ownerId, originalPath: '/path/to/file1', encodedVideoPath: '/path/to/file2' });
await sut.handleUntrackedFiles({
type: 'asset',
paths: ['/path/to/file1', '/path/to/file2', '/path/to/untracked'],
});
await expect(
ctx.get(IntegrityRepository).getIntegrityReport(
{
limit: 100,
},
IntegrityReportType.UntrackedFile,
),
).resolves.toEqual({
items: [
expect.objectContaining({
path: '/path/to/untracked',
}),
],
nextCursor: undefined,
});
});
it('should detect untracked asset_file files', async () => {
const { sut, ctx } = setup();
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId },
} = await ctx.newAsset({ ownerId });
await ctx.newAssetFile({ assetId, type: AssetFileType.Thumbnail, path: '/path/to/file1' });
await sut.handleUntrackedFiles({
type: 'asset_file',
paths: ['/path/to/file1', '/path/to/untracked'],
});
await expect(
ctx.get(IntegrityRepository).getIntegrityReport(
{
limit: 100,
},
IntegrityReportType.UntrackedFile,
),
).resolves.toEqual({
items: [
expect.objectContaining({
path: '/path/to/untracked',
}),
],
nextCursor: undefined,
});
});
});
describe('handleUntrackedRefresh', () => {
it('should succeed', async () => {
const { sut } = setup();
await expect(sut.handleUntrackedRefresh({ items: [] })).resolves.toBe(JobStatus.Success);
});
it('should delete reports for files that no longer exist', async () => {
const { sut, ctx } = setup();
const integrity = ctx.get(IntegrityRepository);
const storage = ctx.getMock(StorageRepository);
const report1 = await integrity.create({
type: IntegrityReportType.UntrackedFile,
path: '/path/to/missing1',
});
const report2 = await integrity.create({
type: IntegrityReportType.UntrackedFile,
path: '/path/to/existing',
});
storage.stat.mockRejectedValueOnce(new Error('ENOENT')).mockResolvedValueOnce({} as never);
await sut.handleUntrackedRefresh({
items: [
{ reportId: report1.id, path: report1.path },
{ reportId: report2.id, path: report2.path },
],
});
await expect(
ctx.get(IntegrityRepository).getIntegrityReport(
{
limit: 100,
},
IntegrityReportType.UntrackedFile,
),
).resolves.toEqual({
items: [
expect.objectContaining({
path: '/path/to/existing',
}),
],
nextCursor: undefined,
});
});
});
describe('handleMissingFilesQueueAll', () => {
it('should succeed', async () => {
const { sut } = setup();
await expect(sut.handleMissingFilesQueueAll()).resolves.toBe(JobStatus.Success);
});
it('should queue jobs', async () => {
const { sut, ctx } = setup();
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file1' });
const {
result: { id: assetId2 },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file2' });
const { id: reportId } = await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.UntrackedFile,
path: '/path/to/file2',
assetId: assetId2,
});
await sut.handleMissingFilesQueueAll({ refreshOnly: false });
expect(job.queue).toHaveBeenCalledWith({
name: JobName.IntegrityMissingFiles,
data: {
items: expect.arrayContaining([
{ path: '/path/to/file1', assetId, fileAssetId: null, reportId: null },
{ path: '/path/to/file2', assetId: assetId2, fileAssetId: null, reportId },
]),
},
});
expect(job.queue).not.toHaveBeenCalledWith(
expect.objectContaining({
name: JobName.IntegrityMissingFilesRefresh,
}),
);
});
it('should queue refresh jobs when refreshOnly is set', async () => {
const { sut, ctx } = setup();
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
const { id: reportId } = await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.MissingFile,
path: '/path/to/file1',
});
await sut.handleMissingFilesQueueAll({ refreshOnly: true });
expect(job.queue).toHaveBeenCalledWith({
name: JobName.IntegrityMissingFilesRefresh,
data: {
items: expect.arrayContaining([{ reportId, path: '/path/to/file1' }]),
},
});
expect(job.queue).not.toHaveBeenCalledWith(
expect.objectContaining({
name: JobName.IntegrityMissingFiles,
}),
);
});
});
describe('handleMissingFiles', () => {
it('should succeed', async () => {
const { sut } = setup();
await expect(sut.handleMissingFiles({ items: [] })).resolves.toBe(JobStatus.Success);
});
it('should detect missing files and remove outdated reports', async () => {
const { sut, ctx } = setup();
const integrity = ctx.get(IntegrityRepository);
const storage = ctx.getMock(StorageRepository);
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file1' });
const { id: restoredId } = await integrity.create({
type: IntegrityReportType.MissingFile,
path: '/path/to/restored',
assetId,
});
storage.stat
.mockResolvedValueOnce({} as never)
.mockRejectedValueOnce(new Error('ENOENT'))
.mockResolvedValueOnce({} as never);
await sut.handleMissingFiles({
items: [
{ path: '/path/to/existing', assetId, fileAssetId: null, reportId: null },
{ path: '/path/to/missing', assetId, fileAssetId: null, reportId: null },
{ path: '/path/to/restored', assetId, fileAssetId: null, reportId: restoredId },
],
});
await expect(
ctx.get(IntegrityRepository).getIntegrityReport(
{
limit: 100,
},
IntegrityReportType.MissingFile,
),
).resolves.toEqual({
items: [
expect.objectContaining({
path: '/path/to/missing',
}),
],
nextCursor: undefined,
});
});
});
describe('handleMissingRefresh', () => {
it('should succeed', async () => {
const { sut } = setup();
await expect(sut.handleMissingRefresh({ items: [] })).resolves.toBe(JobStatus.Success);
});
it('should remove outdated reports', async () => {
const { sut, ctx } = setup();
const integrity = ctx.get(IntegrityRepository);
const storage = ctx.getMock(StorageRepository);
const { id: restoredId } = await integrity.create({
type: IntegrityReportType.MissingFile,
path: '/path/to/restored',
});
storage.stat
.mockResolvedValueOnce({} as never)
.mockRejectedValueOnce(new Error('ENOENT'))
.mockResolvedValueOnce({} as never);
await sut.handleMissingRefresh({
items: [
{ path: '/path/to/existing', reportId: null },
{ path: '/path/to/missing', reportId: null },
{ path: '/path/to/restored', reportId: restoredId },
],
});
await expect(
ctx.get(IntegrityRepository).getIntegrityReport(
{
limit: 100,
},
IntegrityReportType.MissingFile,
),
).resolves.toEqual({
items: [],
nextCursor: undefined,
});
});
});
describe('handleChecksumFiles', () => {
it('should succeed', async () => {
const { sut } = setup();
await expect(sut.handleChecksumFiles({ refreshOnly: false })).resolves.toBe(JobStatus.Success);
});
it('should queue refresh jobs when refreshOnly', async () => {
const { sut, ctx } = setup();
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file1', checksum: Buffer.from('a') });
const { id: reportId } = await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/file1',
assetId,
});
await sut.handleChecksumFiles({ refreshOnly: true });
expect(job.queue).toHaveBeenCalledWith({
name: JobName.IntegrityChecksumFilesRefresh,
data: {
items: [{ reportId, path: '/path/to/file1', checksum: '61' }],
},
});
});
it('should create report for checksum mismatch and delete when fixed', async () => {
const { sut, ctx } = setup();
const storage = ctx.getMock(StorageRepository);
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file1', checksum: Buffer.from('mismatched') });
const fileContent1 = Buffer.from('test content');
await ctx.newAsset({
ownerId,
originalPath: '/path/to/file2',
checksum: createHash('sha1').update(fileContent1).digest(),
});
const fileContent2 = Buffer.from('test content 2');
const {
result: { id: assetId3 },
} = await ctx.newAsset({
ownerId,
originalPath: '/path/to/file3',
checksum: createHash('sha1').update(fileContent2).digest(),
});
await ctx.get(IntegrityRepository).create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/file3',
assetId: assetId3,
});
storage.createPlainReadStream.mockImplementation((path) =>
Readable.from(
path === '/path/to/file2' ? fileContent1 : path === '/path/to/file3' ? fileContent2 : 'garbage data',
),
);
await sut.handleChecksumFiles({ refreshOnly: false });
await expect(
ctx.get(IntegrityRepository).getIntegrityReport(
{
limit: 100,
},
IntegrityReportType.ChecksumFail,
),
).resolves.toEqual({
items: [
expect.objectContaining({
assetId,
path: '/path/to/file1',
}),
],
nextCursor: undefined,
});
});
it('should skip missing files', async () => {
const { sut, ctx } = setup();
const storage = ctx.getMock(StorageRepository);
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
const {
result: { id: ownerId },
} = await ctx.newUser();
await ctx.newAsset({ ownerId, originalPath: '/path/to/file1', checksum: Buffer.from('a') });
const error = new Error('ENOENT') as NodeJS.ErrnoException;
error.code = 'ENOENT';
storage.createPlainReadStream.mockImplementation(() => {
throw error;
});
await sut.handleChecksumFiles({ refreshOnly: false });
await expect(
ctx.get(IntegrityRepository).getIntegrityReport(
{
limit: 100,
},
IntegrityReportType.ChecksumFail,
),
).resolves.toEqual({
items: [],
nextCursor: undefined,
});
});
});
describe('handleChecksumRefresh', () => {
it('should succeed', async () => {
const { sut } = setup();
await expect(sut.handleChecksumRefresh({ items: [] })).resolves.toBe(JobStatus.Success);
});
it('should delete reports when checksum now matches, file is missing, or asset is now missing', async () => {
const { sut, ctx } = setup();
const integrity = ctx.get(IntegrityRepository);
const storage = ctx.getMock(StorageRepository);
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
const fileContent = Buffer.from('test content');
const correctChecksum = createHash('sha1').update(fileContent).digest();
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: fixedAssetId },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/fixed', checksum: correctChecksum });
const { id: fixedReportId } = await integrity.create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/fixed',
assetId: fixedAssetId,
});
const {
result: { id: missingAssetId },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/missing', checksum: Buffer.from('1') });
const { id: missingReportId } = await integrity.create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/missing',
assetId: missingAssetId,
});
const {
result: { id: badAssetId },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/missing', checksum: Buffer.from('2') });
const { id: badReportId } = await integrity.create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/bad',
assetId: badAssetId,
});
const { id: missingAssetReportId } = await integrity.create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/missing-asset',
});
const error = new Error('ENOENT') as NodeJS.ErrnoException;
error.code = 'ENOENT';
storage.createPlainReadStream
.mockImplementationOnce(() => Readable.from(fileContent))
.mockImplementationOnce(() => {
throw error;
})
.mockImplementationOnce(() => Readable.from(fileContent))
.mockImplementationOnce(() => Readable.from(fileContent));
await sut.handleChecksumRefresh({
items: [
{ reportId: fixedReportId, path: '/path/to/fixed', checksum: correctChecksum.toString('hex') },
{ reportId: missingReportId, path: '/path/to/missing', checksum: 'abc123' },
{ reportId: badReportId, path: '/path/to/bad', checksum: 'wrongchecksum' },
{ reportId: missingAssetReportId, path: '/path/to/missing-asset', checksum: null },
],
});
await expect(
ctx.get(IntegrityRepository).getIntegrityReport(
{
limit: 100,
},
IntegrityReportType.ChecksumFail,
),
).resolves.toEqual({
items: [
expect.objectContaining({
id: badReportId,
assetId: badAssetId,
path: '/path/to/bad',
}),
],
nextCursor: undefined,
});
});
});
describe('handleDeleteAllIntegrityReports', () => {
it('should succeed', async () => {
const { sut } = setup();
await expect(sut.handleDeleteAllIntegrityReports({})).resolves.toBe(JobStatus.Success);
});
it('should queue delete jobs for checksum fail reports', async () => {
const { sut, ctx } = setup();
const integrity = ctx.get(IntegrityRepository);
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file1' });
const { id: reportId } = await integrity.create({
type: IntegrityReportType.ChecksumFail,
path: '/path/to/file1',
assetId,
});
await sut.handleDeleteAllIntegrityReports({ type: IntegrityReportType.ChecksumFail });
expect(job.queue).toHaveBeenCalledWith({
name: JobName.IntegrityDeleteReports,
data: {
reports: [{ id: reportId, assetId, path: '/path/to/file1', fileAssetId: null }],
},
});
});
it('should queue delete jobs for missing file reports by assetId and fileAssetId', async () => {
const { sut, ctx } = setup();
const integrity = ctx.get(IntegrityRepository);
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file1' });
const { id: assetReportId } = await integrity.create({
type: IntegrityReportType.MissingFile,
path: '/path/to/file1',
assetId,
});
const {
result: { id: assetId2 },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file2' });
const fileAssetId = randomUUID();
await ctx.newAssetFile({
id: fileAssetId,
assetId: assetId2,
path: '/path/to/file3',
type: AssetFileType.Thumbnail,
});
const { id: fileAssetReportId } = await integrity.create({
type: IntegrityReportType.MissingFile,
path: '/path/to/file3',
fileAssetId,
});
await sut.handleDeleteAllIntegrityReports({ type: IntegrityReportType.MissingFile });
expect(job.queue).toHaveBeenCalledTimes(2);
expect(job.queue).toHaveBeenCalledWith({
name: JobName.IntegrityDeleteReports,
data: {
reports: [{ id: assetReportId, assetId, path: '/path/to/file1', fileAssetId: null }],
},
});
expect(job.queue).toHaveBeenCalledWith({
name: JobName.IntegrityDeleteReports,
data: {
reports: [{ id: fileAssetReportId, assetId: null, path: '/path/to/file3', fileAssetId }],
},
});
});
it('should queue delete jobs for untracked file reports', async () => {
const { sut, ctx } = setup();
const integrity = ctx.get(IntegrityRepository);
const job = ctx.getMock(JobRepository);
job.queue.mockResolvedValue(void 0);
const { id: reportId } = await integrity.create({
type: IntegrityReportType.UntrackedFile,
path: '/path/to/untracked',
});
await sut.handleDeleteAllIntegrityReports({ type: IntegrityReportType.UntrackedFile });
expect(job.queue).toHaveBeenCalledWith({
name: JobName.IntegrityDeleteReports,
data: {
reports: [{ id: reportId, path: '/path/to/untracked', assetId: null, fileAssetId: null }],
},
});
});
});
describe('handleDeleteIntegrityReports', () => {
it('should succeed', async () => {
const { sut } = setup();
await expect(sut.handleDeleteIntegrityReports({ reports: [] })).resolves.toBe(JobStatus.Success);
});
it('should handle all report types', async () => {
const { sut, ctx } = setup();
const integrity = ctx.get(IntegrityRepository);
const storage = ctx.getMock(StorageRepository);
const events = ctx.getMock(EventRepository);
storage.unlink.mockResolvedValue(void 0);
events.emit.mockResolvedValue(void 0);
const {
result: { id: ownerId },
} = await ctx.newUser();
const {
result: { id: assetId1 },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file1' });
const { id: reportId1 } = await integrity.create({
path: '/path/to/file1',
type: IntegrityReportType.ChecksumFail,
assetId: assetId1,
});
const {
result: { id: assetId2 },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file2' });
const { id: reportId2 } = await integrity.create({
path: '/path/to/file2',
type: IntegrityReportType.MissingFile,
assetId: assetId2,
});
const {
result: { id: assetId3 },
} = await ctx.newAsset({ ownerId, originalPath: '/path/to/file3' });
const fileAssetId = randomUUID();
await ctx.newAssetFile({
id: fileAssetId,
assetId: assetId3,
path: '/path/to/file4',
type: AssetFileType.Thumbnail,
});
const { id: reportId3 } = await integrity.create({
path: '/path/to/file4',
type: IntegrityReportType.MissingFile,
fileAssetId,
});
const { id: reportId4 } = await integrity.create({
path: '/path/to/untracked',
type: IntegrityReportType.UntrackedFile,
});
await sut.handleDeleteIntegrityReports({
reports: [
{ id: reportId1, assetId: assetId1, fileAssetId: null, path: '/path/to/file1' },
{ id: reportId2, assetId: assetId2, fileAssetId: null, path: '/path/to/file2' },
{ id: reportId3, assetId: null, fileAssetId, path: '/path/to/file4' },
{ id: reportId4, assetId: null, fileAssetId: null, path: '/path/to/untracked' },
],
});
expect(events.emit).toHaveBeenCalledWith('AssetTrashAll', {
assetIds: [assetId1, assetId2],
userId: '',
});
expect(storage.unlink).toHaveBeenCalledWith('/path/to/untracked');
await expect(ctx.get(AssetRepository).getByIds([assetId1, assetId2, assetId3])).resolves.toEqual(
expect.arrayContaining([
expect.objectContaining({
id: assetId1,
status: 'trashed',
}),
expect.objectContaining({
id: assetId2,
status: 'trashed',
}),
expect.objectContaining({
id: assetId3,
status: 'active',
}),
]),
);
await expect(defaultDatabase.selectFrom('asset_file').execute()).resolves.toEqual([]);
await expect(defaultDatabase.selectFrom('integrity_report').execute()).resolves.toEqual([]);
});
});
});

View File

@@ -1,9 +1,10 @@
import { schemaFromCode } from '@immich/sql-tools';
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { AssetMetadataKey, UserMetadataKey } from 'src/enum';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { BaseSync, SyncRepository } from 'src/repositories/sync.repository';
import { DB } from 'src/schema';
import { SyncService } from 'src/services/sync.service';
import { newMediumService } from 'test/medium.factory';
@@ -222,5 +223,21 @@ describe(SyncService.name, () => {
expect(after).toHaveLength(1);
expect(after[0].id).toBe(keep.id);
});
it('should cleanup every table', async () => {
const { sut } = setup();
const auditTables = schemaFromCode()
.tables.filter((table) => table.name.endsWith('_audit'))
.map(({ name }) => name);
const auditCleanupSpy = vi.spyOn(BaseSync.prototype as any, 'auditCleanup');
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
expect(auditCleanupSpy).toHaveBeenCalledTimes(auditTables.length);
for (const table of auditTables) {
expect(auditCleanupSpy, `Audit table ${table} was not cleaned up`).toHaveBeenCalledWith(table, 31);
}
});
});
});

View File

@@ -0,0 +1,300 @@
import { Kysely } from 'kysely';
import { AssetEditAction, MirrorAxis } from 'src/dtos/editing.dto';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
const setup = async (db?: Kysely<DB>) => {
const ctx = new SyncTestContext(db || defaultDatabase);
const { auth, user, session } = await ctx.newSyncAuthUser();
return { auth, user, session, ctx };
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(SyncRequestType.AssetEditsV1, () => {
it('should detect and sync the first asset edit', async () => {
const { auth, ctx } = await setup();
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
const assetEditRepo = ctx.get(AssetEditRepository);
await assetEditRepo.replaceAll(asset.id, [
{
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
},
]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetEditsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: {
id: expect.any(String),
assetId: asset.id,
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
sequence: 0,
},
type: SyncEntityType.AssetEditV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetEditsV1]);
});
it('should detect and sync multiple asset edits for the same asset', async () => {
const { auth, ctx } = await setup();
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
const assetEditRepo = ctx.get(AssetEditRepository);
await assetEditRepo.replaceAll(asset.id, [
{
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
},
{
action: AssetEditAction.Rotate,
parameters: { angle: 90 },
},
{
action: AssetEditAction.Mirror,
parameters: { axis: MirrorAxis.Horizontal },
},
]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetEditsV1]);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
id: expect.any(String),
assetId: asset.id,
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
sequence: 0,
},
type: SyncEntityType.AssetEditV1,
},
{
ack: expect.any(String),
data: {
id: expect.any(String),
assetId: asset.id,
action: AssetEditAction.Rotate,
parameters: { angle: 90 },
sequence: 1,
},
type: SyncEntityType.AssetEditV1,
},
{
ack: expect.any(String),
data: {
id: expect.any(String),
assetId: asset.id,
action: AssetEditAction.Mirror,
parameters: { axis: MirrorAxis.Horizontal },
sequence: 2,
},
type: SyncEntityType.AssetEditV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]),
);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetEditsV1]);
});
it('should detect and sync updated edits', async () => {
const { auth, ctx } = await setup();
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
const assetEditRepo = ctx.get(AssetEditRepository);
// Create initial edit
const edits = await assetEditRepo.replaceAll(asset.id, [
{
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
},
]);
const response1 = await ctx.syncStream(auth, [SyncRequestType.AssetEditsV1]);
await ctx.syncAckAll(auth, response1);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetEditsV1]);
// update the existing edit
await ctx.database
.updateTable('asset_edit')
.set({
parameters: { x: 50, y: 60, width: 150, height: 250 },
})
.where('id', '=', edits[0].id)
.execute();
const response2 = await ctx.syncStream(auth, [SyncRequestType.AssetEditsV1]);
expect(response2).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
id: expect.any(String),
assetId: asset.id,
action: AssetEditAction.Crop,
parameters: { x: 50, y: 60, width: 150, height: 250 },
sequence: 0,
},
type: SyncEntityType.AssetEditV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]),
);
await ctx.syncAckAll(auth, response2);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetEditsV1]);
});
it('should detect and sync deleted asset edits', async () => {
const { auth, ctx } = await setup();
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
const assetEditRepo = ctx.get(AssetEditRepository);
// Create initial edit
const edits = await assetEditRepo.replaceAll(asset.id, [
{
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
},
]);
const response1 = await ctx.syncStream(auth, [SyncRequestType.AssetEditsV1]);
await ctx.syncAckAll(auth, response1);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetEditsV1]);
// Delete all edits
await assetEditRepo.replaceAll(asset.id, []);
const response2 = await ctx.syncStream(auth, [SyncRequestType.AssetEditsV1]);
expect(response2).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
editId: edits[0].id,
},
type: SyncEntityType.AssetEditDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]),
);
await ctx.syncAckAll(auth, response2);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetEditsV1]);
});
it('should only sync asset edits for own user', async () => {
const { auth, ctx } = await setup();
const { user: user2 } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const assetEditRepo = ctx.get(AssetEditRepository);
const { session } = await ctx.newSession({ userId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
await assetEditRepo.replaceAll(asset.id, [
{
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
},
]);
// User 2 should see their own edit
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetEditsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetEditV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// User 1 should not see user 2's edit
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetEditsV1]);
});
it('should sync edits for multiple assets', async () => {
const { auth, ctx } = await setup();
const { asset: asset1 } = await ctx.newAsset({ ownerId: auth.user.id });
const { asset: asset2 } = await ctx.newAsset({ ownerId: auth.user.id });
const assetEditRepo = ctx.get(AssetEditRepository);
await assetEditRepo.replaceAll(asset1.id, [
{
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
},
]);
await assetEditRepo.replaceAll(asset2.id, [
{
action: AssetEditAction.Rotate,
parameters: { angle: 270 },
},
]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetEditsV1]);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
id: expect.any(String),
assetId: asset1.id,
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
sequence: 0,
},
type: SyncEntityType.AssetEditV1,
},
{
ack: expect.any(String),
data: {
id: expect.any(String),
assetId: asset2.id,
action: AssetEditAction.Rotate,
parameters: { angle: 270 },
sequence: 0,
},
type: SyncEntityType.AssetEditV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]),
);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetEditsV1]);
});
it('should not sync edits for partner assets', async () => {
const { auth, ctx } = await setup();
const { user: partner } = await ctx.newUser();
await ctx.newPartner({ sharedById: partner.id, sharedWithId: auth.user.id });
const { asset } = await ctx.newAsset({ ownerId: partner.id });
const assetEditRepo = ctx.get(AssetEditRepository);
await assetEditRepo.replaceAll(asset.id, [
{
action: AssetEditAction.Crop,
parameters: { x: 10, y: 20, width: 100, height: 200 },
},
]);
// Should not see partner's asset edits in own sync
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetEditsV1]);
});
});

View File

@@ -48,8 +48,8 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
return {
createZipStream: vitest.fn(),
createPlainReadStream: vitest.fn(),
createReadStream: vitest.fn(),
createPlainReadStream: vitest.fn(),
createGzip: vitest.fn(),
createGunzip: vitest.fn(),
readFile: vitest.fn(),

View File

@@ -32,7 +32,6 @@ import { DownloadRepository } from 'src/repositories/download.repository';
import { DuplicateRepository } from 'src/repositories/duplicate.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { IntegrityRepository } from 'src/repositories/integrity.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LibraryRepository } from 'src/repositories/library.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
@@ -227,7 +226,6 @@ export type ServiceOverrides = {
duplicateRepository: DuplicateRepository;
email: EmailRepository;
event: EventRepository;
integrityReport: IntegrityRepository;
job: JobRepository;
library: LibraryRepository;
logger: LoggingRepository;
@@ -310,7 +308,6 @@ export const getMocks = () => {
email: automock(EmailRepository, { args: [loggerMock] }),
// eslint-disable-next-line no-sparse-arrays
event: automock(EventRepository, { args: [, , loggerMock], strict: false }),
integrityReport: automock(IntegrityRepository, { strict: false }),
job: newJobRepositoryMock(),
apiKey: automock(ApiKeyRepository),
library: automock(LibraryRepository, { strict: false }),
@@ -380,7 +377,6 @@ export const newTestService = <T extends BaseService>(
overrides.duplicateRepository || (mocks.duplicateRepository as As<DuplicateRepository>),
overrides.email || (mocks.email as As<EmailRepository>),
overrides.event || (mocks.event as As<EventRepository>),
overrides.integrityReport || (mocks.integrityReport as As<IntegrityRepository>),
overrides.job || (mocks.job as As<JobRepository>),
overrides.library || (mocks.library as As<LibraryRepository>),
overrides.machineLearning || (mocks.machineLearning as As<MachineLearningRepository>),
@@ -571,7 +567,7 @@ export const mockFork = vitest.fn((exitCode: number, stdout: string, stderr: str
} as unknown as ChildProcessWithoutNullStreams;
});
export async function* makeStream<T>(items: T[] = []): AsyncGenerator<T> {
export async function* makeStream<T>(items: T[] = []): AsyncIterableIterator<T> {
for (const item of items) {
await Promise.resolve();
yield item;

View File

@@ -17,10 +17,9 @@
const rateAsset = async (rating: number | null) => {
try {
const updateAssetDto = rating === null ? {} : { rating };
await updateAsset({
id: asset.id,
updateAssetDto,
updateAssetDto: { rating },
});
asset = {

View File

@@ -1,5 +1,5 @@
<script lang="ts">
import StarRating from '$lib/elements/StarRating.svelte';
import StarRating, { type Rating } from '$lib/elements/StarRating.svelte';
import { authManager } from '$lib/managers/auth-manager.svelte';
import { preferences } from '$lib/stores/user.store';
import { handlePromiseError } from '$lib/utils';
@@ -14,9 +14,9 @@
let { asset, isOwner }: Props = $props();
let rating = $derived(asset.exifInfo?.rating || 0);
let rating = $derived(asset.exifInfo?.rating || null) as Rating;
const handleChangeRating = async (rating: number) => {
const handleChangeRating = async (rating: number | null) => {
try {
await updateAsset({ id: asset.id, updateAssetDto: { rating } });
} catch (error) {
@@ -26,7 +26,7 @@
</script>
{#if !authManager.isSharedLink && $preferences?.ratings.enabled}
<section class="px-4 pt-2">
<section class="px-4 pt-4">
<StarRating {rating} readOnly={!isOwner} onRating={(rating) => handlePromiseError(handleChangeRating(rating))} />
</section>
{/if}

View File

@@ -1,41 +0,0 @@
<script lang="ts">
import OnEvents from '$lib/components/OnEvents.svelte';
import { getIntegrityReportItemActions } from '$lib/services/integrity.service';
import type { IntegrityReportType } from '@immich/sdk';
import { ContextMenuButton, TableCell, TableRow } from '@immich/ui';
import { t } from 'svelte-i18n';
type Props = {
id: string;
path: string;
reportType: IntegrityReportType;
};
let { id, path, reportType }: Props = $props();
let deleting = $state(false);
const { Download, Delete } = $derived(getIntegrityReportItemActions($t, id, reportType));
const onIntegrityReportDeleteStatus = ({
id: reportId,
type,
isDeleting,
}: {
id?: string;
type?: IntegrityReportType;
isDeleting: boolean;
}) => {
if (type === reportType || reportId === id) {
deleting = isDeleting;
}
};
</script>
<OnEvents {onIntegrityReportDeleteStatus} />
<TableRow>
<TableCell class="w-7/8 text-left px-4">{path}</TableCell>
<TableCell class="w-1/8 flex justify-end">
<ContextMenuButton disabled={deleting} position="top-right" aria-label={$t('open')} items={[Download, Delete]} />
</TableCell>
</TableRow>

Some files were not shown because too many files have changed in this diff Show More