chore(server): sidecars in asset_files (#21199)

* fix: sidecar check job

* feat: move sidecars to asset_files

* feat: combine with handleSidecarCheck

* fix(server): improved method signatures for stack and sidecar copying

* fix(server): improved method signatures for stack and sidecar copying

* chore: clean up

---------

Co-authored-by: Jason Rasmussen <jason@rasm.me>
Author: Jonathan Jogenfors
Committed: 2025-12-02 19:31:43 +01:00 (committed by GitHub)
Parent: 62628dfcfa
Commit: 1bcf28c062
26 changed files with 425 additions and 154 deletions
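
At a glance, this commit moves sidecar XMP paths off the asset row (the old asset.sidecarPath column) and into asset_file rows with the new AssetFileType.Sidecar value. A minimal before/after sketch of the read and write pattern, pieced together from the changes below (assetRepository and asset are hypothetical stand-ins here; getAssetFiles, upsertFile, and deleteFile are the helpers touched by this commit):

import { AssetFileType } from 'src/enum';
import { getAssetFiles } from 'src/utils/asset.util';

// Before: the sidecar path lived directly on the asset row.
const oldSidecarPath = asset.sidecarPath; // string | null

// After: the sidecar is just another asset_file row, looked up by type.
const { sidecarFile } = getAssetFiles(asset.files ?? []);
const newSidecarPath = sidecarFile?.path ?? null;

// Writes now go through the generic file upsert/delete methods instead of asset.update().
await assetRepository.upsertFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: '/path/to/IMG_123.jpg.xmp' });
await assetRepository.deleteFile({ assetId: asset.id, type: AssetFileType.Sidecar });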

View File

@@ -1006,7 +1006,7 @@ describe('/libraries', () => {
rmSync(`${testAssetDir}/temp/xmp`, { recursive: true, force: true });
});
it('should switch from using file metadata to file.xmp metadata when asset refreshes', async () => {
it('should switch from using file metadata to file.ext.xmp metadata when asset refreshes', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/xmp`],

View File

@@ -305,7 +305,7 @@ export class StorageCore {
return this.assetRepository.update({ id, encodedVideoPath: newPath });
}
case AssetPathType.Sidecar: {
return this.assetRepository.update({ id, sidecarPath: newPath });
return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: newPath });
}
case PersonPathType.Face: {
return this.personRepository.update({ id, thumbnailPath: newPath });

View File

@@ -122,7 +122,6 @@ export type Asset = {
originalFileName: string;
originalPath: string;
ownerId: string;
sidecarPath: string | null;
type: AssetType;
};
@@ -156,13 +155,6 @@ export type StorageAsset = {
encodedVideoPath: string | null;
};
export type SidecarWriteAsset = {
id: string;
sidecarPath: string | null;
originalPath: string;
tags: Array<{ value: string }>;
};
export type Stack = {
id: string;
primaryAssetId: string;
@@ -347,7 +339,6 @@ export const columns = {
'asset.originalFileName',
'asset.originalPath',
'asset.ownerId',
'asset.sidecarPath',
'asset.type',
],
assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type'],

View File

@@ -124,7 +124,6 @@ export type MapAsset = {
originalPath: string;
owner?: User | null;
ownerId: string;
sidecarPath: string | null;
stack?: Stack | null;
stackId: string | null;
tags?: Tag[];

View File

@@ -44,6 +44,7 @@ export enum AssetFileType {
FullSize = 'fullsize',
Preview = 'preview',
Thumbnail = 'thumbnail',
Sidecar = 'sidecar',
}
export enum AlbumUserRole {

View File

@@ -20,8 +20,23 @@ limit
-- AssetJobRepository.getForSidecarWriteJob
select
"id",
"sidecarPath",
"originalPath",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
) as agg
) as "files",
(
select
coalesce(json_agg(agg), '[]')
@@ -39,21 +54,36 @@ select
from
"asset"
where
"asset"."id" = $1::uuid
"asset"."id" = $2::uuid
limit
$2
$3
-- AssetJobRepository.getForSidecarCheckJob
select
"id",
"sidecarPath",
"originalPath"
"originalPath",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
) as agg
) as "files"
from
"asset"
where
"asset"."id" = $1::uuid
"asset"."id" = $2::uuid
limit
$2
$3
-- AssetJobRepository.streamForThumbnailJob
select
@@ -158,7 +188,6 @@ select
"asset"."originalFileName",
"asset"."originalPath",
"asset"."ownerId",
"asset"."sidecarPath",
"asset"."type",
(
select
@@ -173,11 +202,27 @@ select
"asset_face"."assetId" = "asset"."id"
and "asset_face"."deletedAt" is null
) as agg
) as "faces"
) as "faces",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
) as agg
) as "files"
from
"asset"
where
"asset"."id" = $1
"asset"."id" = $2
-- AssetJobRepository.getAlbumThumbnailFiles
select
@@ -322,7 +367,6 @@ select
"asset"."libraryId",
"asset"."ownerId",
"asset"."livePhotoVideoId",
"asset"."sidecarPath",
"asset"."encodedVideoPath",
"asset"."originalPath",
to_json("asset_exif") as "exifInfo",
@@ -433,18 +477,33 @@ select
"asset"."checksum",
"asset"."originalPath",
"asset"."isExternal",
"asset"."sidecarPath",
"asset"."originalFileName",
"asset"."livePhotoVideoId",
"asset"."fileCreatedAt",
"asset_exif"."timeZone",
"asset_exif"."fileSizeInByte"
"asset_exif"."fileSizeInByte",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
) as agg
) as "files"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."deletedAt" is null
and "asset"."id" = $1
and "asset"."id" = $2
-- AssetJobRepository.streamForStorageTemplateJob
select
@@ -454,12 +513,27 @@ select
"asset"."checksum",
"asset"."originalPath",
"asset"."isExternal",
"asset"."sidecarPath",
"asset"."originalFileName",
"asset"."livePhotoVideoId",
"asset"."fileCreatedAt",
"asset_exif"."timeZone",
"asset_exif"."fileSizeInByte"
"asset_exif"."fileSizeInByte",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
) as agg
) as "files"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
@@ -481,11 +555,15 @@ select
from
"asset"
where
(
"asset"."sidecarPath" = $1
or "asset"."sidecarPath" is null
not exists (
select
"asset_file"."id"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
)
and "asset"."visibility" != $2
-- AssetJobRepository.streamForDetectFacesJob
select

View File

@@ -216,6 +216,34 @@ from
limit
3
-- AssetRepository.getForCopy
select
"id",
"stackId",
"originalPath",
"isFavorite",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
) as agg
) as "files"
from
"asset"
where
"id" = $1::uuid
limit
$2
-- AssetRepository.getById
select
"asset".*

View File

@@ -6,7 +6,6 @@ import { Asset, columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, AssetType, AssetVisibility } from 'src/enum';
import { DB } from 'src/schema';
import { StorageAsset } from 'src/types';
import {
anyUuid,
asUuid,
@@ -40,7 +39,8 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.select(['id', 'sidecarPath', 'originalPath'])
.select(['id', 'originalPath'])
.select((eb) => withFiles(eb, AssetFileType.Sidecar))
.select((eb) =>
jsonArrayFrom(
eb
@@ -59,7 +59,8 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.select(['id', 'sidecarPath', 'originalPath'])
.select(['id', 'originalPath'])
.select((eb) => withFiles(eb, AssetFileType.Sidecar))
.limit(1)
.executeTakeFirst();
}
@@ -122,6 +123,7 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(columns.asset)
.select(withFaces)
.select((eb) => withFiles(eb, AssetFileType.Sidecar))
.where('asset.id', '=', id)
.executeTakeFirst();
}
@@ -228,7 +230,6 @@ export class AssetJobRepository {
'asset.libraryId',
'asset.ownerId',
'asset.livePhotoVideoId',
'asset.sidecarPath',
'asset.encodedVideoPath',
'asset.originalPath',
])
@@ -306,26 +307,24 @@ export class AssetJobRepository {
'asset.checksum',
'asset.originalPath',
'asset.isExternal',
'asset.sidecarPath',
'asset.originalFileName',
'asset.livePhotoVideoId',
'asset.fileCreatedAt',
'asset_exif.timeZone',
'asset_exif.fileSizeInByte',
])
.select((eb) => withFiles(eb, AssetFileType.Sidecar))
.where('asset.deletedAt', 'is', null);
}
@GenerateSql({ params: [DummyValue.UUID] })
getForStorageTemplateJob(id: string): Promise<StorageAsset | undefined> {
return this.storageTemplateAssetQuery().where('asset.id', '=', id).executeTakeFirst() as Promise<
StorageAsset | undefined
>;
getForStorageTemplateJob(id: string) {
return this.storageTemplateAssetQuery().where('asset.id', '=', id).executeTakeFirst();
}
@GenerateSql({ params: [], stream: true })
streamForStorageTemplateJob() {
return this.storageTemplateAssetQuery().stream() as AsyncIterableIterator<StorageAsset>;
return this.storageTemplateAssetQuery().stream();
}
@GenerateSql({ params: [DummyValue.DATE], stream: true })
@@ -343,9 +342,18 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['asset.id'])
.$if(!force, (qb) =>
qb.where((eb) => eb.or([eb('asset.sidecarPath', '=', ''), eb('asset.sidecarPath', 'is', null)])),
qb.where((eb) =>
eb.not(
eb.exists(
eb
.selectFrom('asset_file')
.select('asset_file.id')
.whereRef('asset_file.assetId', '=', 'asset.id')
.where('asset_file.type', '=', AssetFileType.Sidecar),
),
),
),
)
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.stream();
}
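
The repository queries above lean on a withFiles helper that is not part of this diff. A rough sketch of what it presumably looks like, reconstructed from the generated SQL earlier in this commit (the exact signature and location are assumptions):

import { ExpressionBuilder } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { AssetFileType } from 'src/enum';
import { DB } from 'src/schema';

// Embeds the matching asset_file rows as a JSON array aliased "files";
// when a type is passed (e.g. AssetFileType.Sidecar), only files of that type are included.
export function withFiles(eb: ExpressionBuilder<DB, 'asset'>, type?: AssetFileType) {
  return jsonArrayFrom(
    eb
      .selectFrom('asset_file')
      .select(['asset_file.id', 'asset_file.path', 'asset_file.type'])
      .whereRef('asset_file.assetId', '=', 'asset.id')
      .$if(!!type, (qb) => qb.where('asset_file.type', '=', type!)),
  ).as('files');
}

This shape would cover both call sites in the diff: .select(withFiles) in AssetRepository.getForCopy (no type filter) and .select((eb) => withFiles(eb, AssetFileType.Sidecar)) in the job queries.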

View File

@@ -396,6 +396,17 @@ export class AssetRepository {
return this.db.selectFrom('asset_file').select(['assetId', 'path']).limit(sql.lit(3)).execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForCopy(id: string) {
return this.db
.selectFrom('asset')
.select(['id', 'stackId', 'originalPath', 'isFavorite'])
.select(withFiles)
.where('id', '=', asUuid(id))
.limit(1)
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
return this.db
@@ -842,6 +853,10 @@ export class AssetRepository {
.execute();
}
async deleteFile({ assetId, type }: { assetId: string; type: AssetFileType }): Promise<void> {
await this.db.deleteFrom('asset_file').where('assetId', '=', asUuid(assetId)).where('type', '=', type).execute();
}
async deleteFiles(files: Pick<Selectable<AssetFileTable>, 'id'>[]): Promise<void> {
if (files.length === 0) {
return;

View File

@@ -403,7 +403,6 @@ export class DatabaseRepository {
.set((eb) => ({
originalPath: eb.fn('REGEXP_REPLACE', ['originalPath', source, target]),
encodedVideoPath: eb.fn('REGEXP_REPLACE', ['encodedVideoPath', source, target]),
sidecarPath: eb.fn('REGEXP_REPLACE', ['sidecarPath', source, target]),
}))
.execute();

View File

@@ -0,0 +1,24 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`INSERT INTO "asset_file" ("assetId", "path", "type")
SELECT
id, "sidecarPath", 'sidecar'
FROM "asset"
WHERE "sidecarPath" IS NOT NULL AND "sidecarPath" != '';`.execute(db);
await sql`ALTER TABLE "asset" DROP COLUMN "sidecarPath";`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "asset" ADD "sidecarPath" character varying;`.execute(db);
await sql`
UPDATE "asset"
SET "sidecarPath" = "asset_file"."path"
FROM "asset_file"
WHERE "asset"."id" = "asset_file"."assetId" AND "asset_file"."type" = 'sidecar';
`.execute(db);
await sql`DELETE FROM "asset_file" WHERE "type" = 'sidecar';`.execute(db);
}

View File

@@ -105,9 +105,6 @@ export class AssetTable {
@Column({ index: true })
originalFileName!: string;
@Column({ nullable: true })
sidecarPath!: string | null;
@Column({ type: 'bytea', nullable: true })
thumbhash!: Buffer | null;

View File

@@ -174,7 +174,6 @@ const assetEntity = Object.freeze({
longitude: 10.703_075,
},
livePhotoVideoId: null,
sidecarPath: null,
} as MapAsset);
const existingAsset = Object.freeze({
@@ -188,7 +187,6 @@ const existingAsset = Object.freeze({
const sidecarAsset = Object.freeze({
...existingAsset,
sidecarPath: 'sidecar-path',
checksum: Buffer.from('_getExistingAssetWithSideCar', 'utf8'),
}) as MapAsset;
@@ -721,18 +719,22 @@ describe(AssetMediaService.name, () => {
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
id: existingAsset.id,
sidecarPath: null,
originalFileName: 'photo1.jpeg',
originalPath: 'fake_path/photo1.jpeg',
}),
);
expect(mocks.asset.create).toHaveBeenCalledWith(
expect.objectContaining({
sidecarPath: null,
originalFileName: 'existing-filename.jpeg',
originalPath: 'fake_path/asset_1.jpeg',
}),
);
expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
expect.objectContaining({
assetId: existingAsset.id,
type: AssetFileType.Sidecar,
}),
);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], {
deletedAt: expect.any(Date),
@@ -769,6 +771,13 @@ describe(AssetMediaService.name, () => {
deletedAt: expect.any(Date),
status: AssetStatus.Trashed,
});
expect(mocks.asset.upsertFile).toHaveBeenCalledWith(
expect.objectContaining({
assetId: existingAsset.id,
path: sidecarFile.originalPath,
type: AssetFileType.Sidecar,
}),
);
expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
expect(mocks.storage.utimes).toHaveBeenCalledWith(
updatedFile.originalPath,
@@ -798,6 +807,12 @@ describe(AssetMediaService.name, () => {
deletedAt: expect.any(Date),
status: AssetStatus.Trashed,
});
expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
expect.objectContaining({
assetId: existingAsset.id,
type: AssetFileType.Sidecar,
}),
);
expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
expect(mocks.storage.utimes).toHaveBeenCalledWith(
updatedFile.originalPath,
@@ -827,6 +842,9 @@ describe(AssetMediaService.name, () => {
expect(mocks.asset.create).not.toHaveBeenCalled();
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.asset.deleteFile).not.toHaveBeenCalled();
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: { files: [updatedFile.originalPath, undefined] },

View File

@@ -21,7 +21,16 @@ import {
UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetStatus, AssetType, AssetVisibility, CacheControl, JobName, Permission, StorageFolder } from 'src/enum';
import {
AssetFileType,
AssetStatus,
AssetType,
AssetVisibility,
CacheControl,
JobName,
Permission,
StorageFolder,
} from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { BaseService } from 'src/services/base.service';
import { UploadFile, UploadRequest } from 'src/types';
@@ -354,9 +363,12 @@ export class AssetMediaService extends BaseService {
duration: dto.duration || null,
livePhotoVideoId: null,
sidecarPath: sidecarPath || null,
});
await (sidecarPath
? this.assetRepository.upsertFile({ assetId, type: AssetFileType.Sidecar, path: sidecarPath })
: this.assetRepository.deleteFile({ assetId, type: AssetFileType.Sidecar }));
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size });
await this.jobRepository.queue({
@@ -384,7 +396,6 @@ export class AssetMediaService extends BaseService {
localDateTime: asset.localDateTime,
fileModifiedAt: asset.fileModifiedAt,
livePhotoVideoId: asset.livePhotoVideoId,
sidecarPath: asset.sidecarPath,
});
const { size } = await this.storageRepository.stat(created.originalPath);
@@ -414,7 +425,6 @@ export class AssetMediaService extends BaseService {
visibility: dto.visibility ?? AssetVisibility.Timeline,
livePhotoVideoId: dto.livePhotoVideoId,
originalFileName: dto.filename || file.originalName,
sidecarPath: sidecarFile?.originalPath,
});
if (dto.metadata) {
@@ -422,6 +432,11 @@ export class AssetMediaService extends BaseService {
}
if (sidecarFile) {
await this.assetRepository.upsertFile({
assetId: asset.id,
path: sidecarFile.originalPath,
type: AssetFileType.Sidecar,
});
await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
}
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));

View File

@@ -585,8 +585,8 @@ describe(AssetService.name, () => {
'/uploads/user-id/webp/path.ext',
'/uploads/user-id/thumbs/path.jpg',
'/uploads/user-id/fullsize/path.webp',
assetWithFace.encodedVideoPath,
assetWithFace.sidecarPath,
assetWithFace.encodedVideoPath, // this value is null
undefined, // no sidecar path
assetWithFace.originalPath,
],
},

View File

@@ -2,6 +2,7 @@ import { BadRequestException, Injectable } from '@nestjs/common';
import _ from 'lodash';
import { DateTime, Duration } from 'luxon';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { AssetFile } from 'src/database';
import { OnJob } from 'src/decorators';
import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import {
@@ -18,7 +19,16 @@ import {
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { AssetMetadataKey, AssetStatus, AssetVisibility, JobName, JobStatus, Permission, QueueName } from 'src/enum';
import {
AssetFileType,
AssetMetadataKey,
AssetStatus,
AssetVisibility,
JobName,
JobStatus,
Permission,
QueueName,
} from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
import { requireElevatedPermission } from 'src/utils/access';
@@ -197,8 +207,8 @@ export class AssetService extends BaseService {
}: AssetCopyDto,
) {
await this.requireAccess({ auth, permission: Permission.AssetCopy, ids: [sourceId, targetId] });
const sourceAsset = await this.assetRepository.getById(sourceId);
const targetAsset = await this.assetRepository.getById(targetId);
const sourceAsset = await this.assetRepository.getForCopy(sourceId);
const targetAsset = await this.assetRepository.getForCopy(targetId);
if (!sourceAsset || !targetAsset) {
throw new BadRequestException('Both assets must exist');
@@ -252,19 +262,25 @@ export class AssetService extends BaseService {
sourceAsset,
targetAsset,
}: {
sourceAsset: { sidecarPath: string | null };
targetAsset: { id: string; sidecarPath: string | null; originalPath: string };
sourceAsset: { files: AssetFile[] };
targetAsset: { id: string; files: AssetFile[]; originalPath: string };
}) {
if (!sourceAsset.sidecarPath) {
const { sidecarFile: sourceFile } = getAssetFiles(sourceAsset.files);
if (!sourceFile?.path) {
return;
}
if (targetAsset.sidecarPath) {
await this.storageRepository.unlink(targetAsset.sidecarPath);
const { sidecarFile: targetFile } = getAssetFiles(targetAsset.files ?? []);
if (targetFile?.path) {
await this.storageRepository.unlink(targetFile.path);
}
await this.storageRepository.copyFile(sourceAsset.sidecarPath, `${targetAsset.originalPath}.xmp`);
await this.assetRepository.update({ id: targetAsset.id, sidecarPath: `${targetAsset.originalPath}.xmp` });
await this.storageRepository.copyFile(sourceFile.path, `${targetAsset.originalPath}.xmp`);
await this.assetRepository.upsertFile({
assetId: targetAsset.id,
path: `${targetAsset.originalPath}.xmp`,
type: AssetFileType.Sidecar,
});
await this.jobRepository.queue({ name: JobName.AssetExtractMetadata, data: { id: targetAsset.id } });
}
@@ -344,11 +360,11 @@ export class AssetService extends BaseService {
}
}
const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(asset.files ?? []);
const { fullsizeFile, previewFile, thumbnailFile, sidecarFile } = getAssetFiles(asset.files ?? []);
const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];
if (deleteOnDisk) {
files.push(asset.sidecarPath, asset.originalPath);
files.push(sidecarFile?.path, asset.originalPath);
}
await this.jobRepository.queue({ name: JobName.FileDelete, data: { files } });

View File

@@ -4,7 +4,16 @@ import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { defaults } from 'src/config';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetType, AssetVisibility, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
import {
AssetFileType,
AssetType,
AssetVisibility,
ExifOrientation,
ImmichWorker,
JobName,
JobStatus,
SourceType,
} from 'src/enum';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
import { assetStub } from 'test/fixtures/asset.stub';
@@ -15,17 +24,24 @@ import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
const removeNonSidecarFiles = (asset: any) => {
return {
...asset,
files: asset.files.filter((file: any) => file.type === AssetFileType.Sidecar),
};
};
const forSidecarJob = (
asset: {
id?: string;
originalPath?: string;
sidecarPath?: string | null;
files?: { id: string; type: AssetFileType; path: string }[];
} = {},
) => {
return {
id: factory.uuid(),
originalPath: '/path/to/IMG_123.jpg',
sidecarPath: null,
files: [],
...asset,
};
};
@@ -166,7 +182,7 @@ describe(MetadataService.name, () => {
it('should handle a date in a sidecar file', async () => {
const originalDate = new Date('2023-11-21T16:13:17.517Z');
const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
mockReadTags({ CreationDate: originalDate.toISOString() }, { CreationDate: sidecarDate.toISOString() });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -185,7 +201,7 @@ describe(MetadataService.name, () => {
it('should take the file modification date when missing exif and earlier than creation date', async () => {
const fileCreatedAt = new Date('2022-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2021-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -211,7 +227,7 @@ describe(MetadataService.name, () => {
it('should take the file creation date when missing exif and earlier than modification date', async () => {
const fileCreatedAt = new Date('2021-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -234,7 +250,7 @@ describe(MetadataService.name, () => {
it('should determine dateTimeOriginal regardless of the server time zone', async () => {
process.env.TZ = 'America/Los_Angeles';
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -252,7 +268,7 @@ describe(MetadataService.name, () => {
});
it('should handle lists of numbers', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.image.fileModifiedAt,
@@ -305,7 +321,7 @@ describe(MetadataService.name, () => {
});
it('should apply reverse geocoding', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: true } });
mocks.map.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
mocks.storage.stat.mockResolvedValue({
@@ -334,7 +350,7 @@ describe(MetadataService.name, () => {
});
it('should discard latitude and longitude on null island', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
mockReadTags({
GPSLatitude: 0,
GPSLongitude: 0,
@@ -346,7 +362,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ TagsList: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -356,7 +372,7 @@ describe(MetadataService.name, () => {
});
it('should extract hierarchy from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -376,7 +392,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from Keywords as a string', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: 'Parent' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -386,7 +402,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from Keywords as a list', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -396,7 +412,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from Keywords as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -407,7 +423,7 @@ describe(MetadataService.name, () => {
});
it('should extract hierarchal tags from Keywords', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: 'Parent/Child' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -426,7 +442,7 @@ describe(MetadataService.name, () => {
});
it('should ignore Keywords when TagsList is present', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -445,7 +461,7 @@ describe(MetadataService.name, () => {
});
it('should extract hierarchy from HierarchicalSubject', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -466,7 +482,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from HierarchicalSubject as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -1030,8 +1046,15 @@ describe(MetadataService.name, () => {
it('should prefer Duration from exif over sidecar', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
...assetStub.image,
sidecarPath: '/path/to/something',
files: [
{
id: 'some-id',
type: AssetFileType.Sidecar,
path: '/path/to/something',
},
],
});
mockReadTags({ Duration: 123 }, { Duration: 456 });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -1536,18 +1559,25 @@ describe(MetadataService.name, () => {
});
it('should detect a new sidecar at .jpg.xmp', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', files: [] });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: `/path/to/IMG_123.jpg.xmp` });
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: asset.id,
type: AssetFileType.Sidecar,
path: '/path/to/IMG_123.jpg.xmp',
});
});
it('should detect a new sidecar at .xmp', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [],
});
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
@@ -1555,33 +1585,44 @@ describe(MetadataService.name, () => {
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: '/path/to/IMG_123.xmp' });
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: asset.id,
type: AssetFileType.Sidecar,
path: '/path/to/IMG_123.xmp',
});
});
it('should unset sidecar path if file does not exist anymore', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg.xmp' });
it('should unset sidecar path if file no longer exists', async () => {
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
});
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValue(false);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: null });
expect(mocks.asset.deleteFile).toHaveBeenCalledWith({ assetId: asset.id, type: AssetFileType.Sidecar });
});
it('should do nothing if the sidecar file still exists', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg' });
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
});
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Skipped);
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.asset.deleteFile).not.toHaveBeenCalled();
});
});
describe('handleSidecarWrite', () => {
it('should skip assets that do not exist anymore', async () => {
it('should skip assets that no longer exist', async () => {
mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(void 0);
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.Failed);
expect(mocks.metadata.writeTags).not.toHaveBeenCalled();
@@ -1610,7 +1651,7 @@ describe(MetadataService.name, () => {
dateTimeOriginal: date,
}),
).resolves.toBe(JobStatus.Success);
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.sidecarPath, {
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.files[0].path, {
Description: description,
ImageDescription: description,
DateTimeOriginal: date,

View File

@@ -8,9 +8,10 @@ import { constants } from 'node:fs/promises';
import { join, parse } from 'node:path';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { Asset, AssetFace } from 'src/database';
import { Asset, AssetFace, AssetFile } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import {
AssetFileType,
AssetType,
AssetVisibility,
DatabaseLock,
@@ -29,6 +30,7 @@ import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
import { PersonTable } from 'src/schema/tables/person.table';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { getAssetFiles } from 'src/utils/asset.util';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { isFaceImportEnabled } from 'src/utils/misc';
import { upsertTags } from 'src/utils/tag';
@@ -359,17 +361,21 @@ export class MetadataService extends BaseService {
break;
}
const isChanged = sidecarPath !== asset.sidecarPath;
const { sidecarFile } = getAssetFiles(asset.files);
const isChanged = sidecarPath !== sidecarFile?.path;
this.logger.debug(
`Sidecar check found old=${asset.sidecarPath}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
`Sidecar check found old=${sidecarFile?.path}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
);
if (!isChanged) {
return JobStatus.Skipped;
}
await this.assetRepository.update({ id: asset.id, sidecarPath });
await (sidecarPath === null
? this.assetRepository.deleteFile({ assetId: asset.id, type: AssetFileType.Sidecar })
: this.assetRepository.upsertFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: sidecarPath }));
return JobStatus.Success;
}
@@ -394,7 +400,9 @@ export class MetadataService extends BaseService {
const tagsList = (asset.tags || []).map((tag) => tag.value);
const sidecarPath = asset.sidecarPath || `${asset.originalPath}.xmp`;
const { sidecarFile } = getAssetFiles(asset.files);
const sidecarPath = sidecarFile?.path || `${asset.originalPath}.xmp`;
const exif = _.omitBy(
<Tags>{
Description: description,
@@ -414,18 +422,19 @@ export class MetadataService extends BaseService {
await this.metadataRepository.writeTags(sidecarPath, exif);
if (!asset.sidecarPath) {
await this.assetRepository.update({ id, sidecarPath });
if (asset.files.length === 0) {
await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: sidecarPath });
}
return JobStatus.Success;
}
private getSidecarCandidates({ sidecarPath, originalPath }: { sidecarPath: string | null; originalPath: string }) {
private getSidecarCandidates({ files, originalPath }: { files: AssetFile[]; originalPath: string }) {
const candidates: string[] = [];
if (sidecarPath) {
candidates.push(sidecarPath);
const { sidecarFile } = getAssetFiles(files);
if (sidecarFile?.path) {
candidates.push(sidecarFile.path);
}
const assetPath = parse(originalPath);
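
The hunk cuts off here. Judging by the sidecar-check tests in this commit (which probe <originalPath>.xmp first and then <dir>/<name>.xmp), getSidecarCandidates presumably finishes roughly like this; a sketch under that assumption, not the commit's actual code:

// Fall back to the two conventional sidecar locations next to the original file.
candidates.push(`${originalPath}.xmp`, join(assetPath.dir, `${assetPath.name}.xmp`));
return candidates;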
@@ -456,14 +465,12 @@ export class MetadataService extends BaseService {
return { width, height };
}
private async getExifTags(asset: {
originalPath: string;
sidecarPath: string | null;
type: AssetType;
}): Promise<ImmichTags> {
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
const { sidecarFile } = getAssetFiles(asset.files);
const [mediaTags, sidecarTags, videoTags] = await Promise.all([
this.metadataRepository.readTags(asset.originalPath),
asset.sidecarPath ? this.metadataRepository.readTags(asset.sidecarPath) : null,
sidecarFile ? this.metadataRepository.readTags(sidecarFile.path) : null,
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
]);

View File

@@ -6,10 +6,20 @@ import sanitize from 'sanitize-filename';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { SystemConfigTemplateStorageOptionDto } from 'src/dtos/system-config.dto';
import { AssetPathType, AssetType, DatabaseLock, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import {
AssetFileType,
AssetPathType,
AssetType,
DatabaseLock,
JobName,
JobStatus,
QueueName,
StorageFolder,
} from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobOf, StorageAsset } from 'src/types';
import { getAssetFile } from 'src/utils/asset.util';
import { getLivePhotoMotionFilename } from 'src/utils/file';
const storageTokens = {
@@ -196,7 +206,7 @@ export class StorageTemplateService extends BaseService {
}
return this.databaseRepository.withLock(DatabaseLock.StorageTemplateMigration, async () => {
const { id, sidecarPath, originalPath, checksum, fileSizeInByte } = asset;
const { id, originalPath, checksum, fileSizeInByte } = asset;
const oldPath = originalPath;
const newPath = await this.getTemplatePath(asset, metadata);
@@ -213,6 +223,8 @@ export class StorageTemplateService extends BaseService {
newPath,
assetInfo: { sizeInBytes: fileSizeInByte, checksum },
});
const sidecarPath = getAssetFile(asset.files, AssetFileType.Sidecar)?.path;
if (sidecarPath) {
await this.storageCore.moveFile({
entityId: id,

View File

@@ -1,6 +1,6 @@
import { SystemConfig } from 'src/config';
import { VECTOR_EXTENSIONS } from 'src/constants';
import { Asset } from 'src/database';
import { Asset, AssetFile } from 'src/database';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
@@ -475,8 +475,8 @@ export type StorageAsset = {
fileCreatedAt: Date;
originalPath: string;
originalFileName: string;
sidecarPath: string | null;
fileSizeInByte: number | null;
files: AssetFile[];
};
export type OnThisDayData = { year: number };

View File

@@ -21,6 +21,7 @@ export const getAssetFiles = (files: AssetFile[]) => ({
fullsizeFile: getAssetFile(files, AssetFileType.FullSize),
previewFile: getAssetFile(files, AssetFileType.Preview),
thumbnailFile: getAssetFile(files, AssetFileType.Thumbnail),
sidecarFile: getAssetFile(files, AssetFileType.Sidecar),
});
export const addAssets = async (

View File

@@ -24,6 +24,18 @@ const fullsizeFile: AssetFile = {
path: '/uploads/user-id/fullsize/path.webp',
};
const sidecarFileWithExt: AssetFile = {
id: 'sidecar-with-ext',
type: AssetFileType.Sidecar,
path: '/original/path.ext.xmp',
};
const sidecarFileWithoutExt: AssetFile = {
id: 'sidecar-without-ext',
type: AssetFileType.Sidecar,
path: '/original/path.xmp',
};
const files: AssetFile[] = [fullsizeFile, previewFile, thumbnailFile];
export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif })[]) => {
@@ -51,8 +63,8 @@ export const assetStub = {
fileCreatedAt: new Date('2022-06-19T23:41:36.910Z'),
originalPath: '/original/path.jpg',
originalFileName: 'IMG_123.jpg',
sidecarPath: null,
fileSizeInByte: 12_345,
files: [],
...asset,
}),
noResizePath: Object.freeze({
@@ -81,7 +93,6 @@ export const assetStub = {
sharedLinks: [],
faces: [],
exifInfo: {} as Exif,
sidecarPath: null,
deletedAt: null,
isExternal: false,
duplicateId: null,
@@ -117,7 +128,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'IMG_456.jpg',
faces: [],
sidecarPath: null,
isExternal: false,
exifInfo: {
fileSizeInByte: 123_000,
@@ -157,7 +167,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: null,
deletedAt: null,
duplicateId: null,
isOffline: false,
@@ -194,7 +203,6 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 1000,
@@ -243,7 +251,6 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -285,7 +292,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -328,7 +334,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -367,7 +372,6 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -409,7 +413,6 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -448,7 +451,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -490,7 +492,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -526,7 +527,6 @@ export const assetStub = {
livePhotoVideoId: null,
sharedLinks: [],
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 100_000,
exifImageHeight: 2160,
@@ -553,6 +553,7 @@ export const assetStub = {
fileSizeInByte: 100_000,
timeZone: `America/New_York`,
},
files: [] as AssetFile[],
libraryId: null,
visibility: AssetVisibility.Hidden,
} as MapAsset & { faces: AssetFace[]; files: AssetFile[]; exifInfo: Exif }),
@@ -573,7 +574,7 @@ export const assetStub = {
files,
faces: [] as AssetFace[],
visibility: AssetVisibility.Timeline,
} as MapAsset & { faces: AssetFace[] }),
} as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),
livePhotoWithOriginalFileName: Object.freeze({
id: 'live-photo-still-asset',
@@ -589,10 +590,11 @@ export const assetStub = {
fileSizeInByte: 25_000,
timeZone: `America/New_York`,
},
files: [] as AssetFile[],
libraryId: null,
faces: [] as AssetFace[],
visibility: AssetVisibility.Timeline,
} as MapAsset & { faces: AssetFace[] }),
} as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),
withLocation: Object.freeze({
id: 'asset-with-favorite-id',
@@ -605,7 +607,6 @@ export const assetStub = {
deviceId: 'device-id',
checksum: Buffer.from('file hash', 'utf8'),
originalPath: '/original/path.ext',
sidecarPath: null,
type: AssetType.Image,
files: [previewFile],
thumbhash: null,
@@ -652,7 +653,7 @@ export const assetStub = {
thumbhash: null,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files: [previewFile],
files: [previewFile, sidecarFileWithExt],
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
@@ -665,7 +666,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: '/original/path.ext.xmp',
deletedAt: null,
duplicateId: null,
isOffline: false,
@@ -688,7 +688,7 @@ export const assetStub = {
thumbhash: null,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files: [previewFile],
files: [previewFile, sidecarFileWithoutExt],
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
@@ -701,7 +701,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: '/original/path.xmp',
deletedAt: null,
duplicateId: null,
isOffline: false,
@@ -734,7 +733,6 @@ export const assetStub = {
livePhotoVideoId: null,
sharedLinks: [],
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 100_000,
} as Exif,
@@ -776,7 +774,6 @@ export const assetStub = {
originalFileName: 'photo.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -812,7 +809,6 @@ export const assetStub = {
originalFileName: 'asset-id.dng',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
@@ -853,7 +849,6 @@ export const assetStub = {
originalFileName: 'asset-id.hif',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',

View File

@@ -1,5 +1,5 @@
import { Kysely } from 'kysely';
import { JobName, SharedLinkType } from 'src/enum';
import { AssetFileType, JobName, SharedLinkType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
@@ -184,7 +184,15 @@ describe(AssetService.name, () => {
jobRepo.queue.mockResolvedValue();
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id, sidecarPath: '/path/to/my/sidecar.xmp' });
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newAssetFile({
assetId: oldAsset.id,
path: '/path/to/my/sidecar.xmp',
type: AssetFileType.Sidecar,
});
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });

View File

@@ -82,7 +82,11 @@ describe(MetadataService.name, () => {
process.env.TZ = serverTimeZone ?? undefined;
const { filePath } = await createTestFile(exifData);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ id: 'asset-1', originalPath: filePath } as any);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
id: 'asset-1',
originalPath: filePath,
files: [],
} as any);
await sut.handleMetadataExtraction({ id: 'asset-1' });

View File

@@ -10,6 +10,7 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
updateAllExif: vitest.fn(),
updateDateTimeOriginal: vitest.fn().mockResolvedValue([]),
upsertJobStatus: vitest.fn(),
getForCopy: vitest.fn(),
getByDayOfYear: vitest.fn(),
getByIds: vitest.fn().mockResolvedValue([]),
getByIdsWithAllRelationsButStacks: vitest.fn().mockResolvedValue([]),
@@ -36,6 +37,7 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getChangedDeltaSync: vitest.fn(),
upsertFile: vitest.fn(),
upsertFiles: vitest.fn(),
deleteFile: vitest.fn(),
deleteFiles: vitest.fn(),
detectOfflineExternalAssets: vitest.fn(),
filterNewExternalAssetPaths: vitest.fn(),

View File

@@ -8,14 +8,22 @@ import {
Memory,
Partner,
Session,
SidecarWriteAsset,
User,
UserAdmin,
} from 'src/database';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { QueueStatisticsDto } from 'src/dtos/queue.dto';
import { AssetStatus, AssetType, AssetVisibility, MemoryType, Permission, UserMetadataKey, UserStatus } from 'src/enum';
import {
AssetFileType,
AssetStatus,
AssetType,
AssetVisibility,
MemoryType,
Permission,
UserMetadataKey,
UserStatus,
} from 'src/enum';
import { OnThisDayData, UserMetadataItem } from 'src/types';
import { v4, v7 } from 'uuid';
@@ -237,7 +245,6 @@ const assetFactory = (asset: Partial<MapAsset> = {}) => ({
originalFileName: 'IMG_123.jpg',
originalPath: `/data/12/34/IMG_123.jpg`,
ownerId: newUuid(),
sidecarPath: null,
stackId: null,
thumbhash: null,
type: AssetType.Image,
@@ -312,12 +319,17 @@ const versionHistoryFactory = () => ({
version: '1.123.45',
});
const assetSidecarWriteFactory = (asset: Partial<SidecarWriteAsset> = {}) => ({
const assetSidecarWriteFactory = () => ({
id: newUuid(),
sidecarPath: '/path/to/original-path.jpg.xmp',
originalPath: '/path/to/original-path.jpg.xmp',
tags: [],
...asset,
files: [
{
id: newUuid(),
path: '/path/to/original-path.jpg.xmp',
type: AssetFileType.Sidecar,
},
],
});
const assetOcrFactory = (