Compare commits

...

5 Commits

Author           SHA1        Message                                                       Date
Daniel Dietzler  72e6bc8f8a  chore: refactor more queries                                  2026-01-27 19:32:34 +01:00
Daniel Dietzler  03f0106b3d  refactor: asset service queries                               2026-01-27 17:50:22 +01:00
Mees Frensel     818f7b3e9b  fix(web): queue graph formatting for y-axis labels (#25567)   2026-01-27 10:41:31 -06:00
Alex             44b4f35019  chore: expose upload errors to UI (#25566)                    2026-01-27 16:33:44 +00:00
Daniel Dietzler  212c03ceff  fix(web): properly encode shared link slug (#25564)           2026-01-27 16:29:51 +01:00
21 changed files with 378 additions and 69 deletions

View File

@@ -572,6 +572,9 @@
"asset_list_layout_sub_title": "Layout",
"asset_list_settings_subtitle": "Photo grid layout settings",
"asset_list_settings_title": "Photo Grid",
"asset_not_found_on_device_android": "Asset not found on device",
"asset_not_found_on_device_ios": "Asset not found on device. If you are using iCloud, the asset may be inaccessible due to bad file stored on iCloud",
"asset_not_found_on_icloud": "Asset not found on iCloud. the asset may be inaccessible due to bad file stored on iCloud",
"asset_offline": "Asset Offline",
"asset_offline_description": "This external asset is no longer found on disk. Please contact your Immich administrator for help.",
"asset_restored_successfully": "Asset restored successfully",
@@ -2295,6 +2298,7 @@
"upload_details": "Upload Details",
"upload_dialog_info": "Do you want to backup the selected Asset(s) to the server?",
"upload_dialog_title": "Upload Asset",
"upload_error_with_count": "Upload error for {count, plural, one {# asset} other {# assets}}",
"upload_errors": "Upload completed with {count, plural, one {# error} other {# errors}}, refresh the page to see new upload assets.",
"upload_finished": "Upload finished",
"upload_progress": "Remaining {remaining, number} - Processed {processed, number}/{total, number}",

View File

@@ -62,6 +62,8 @@ class BackupToggleButtonState extends ConsumerState<BackupToggleButton> with Sin
final iCloudProgress = ref.watch(driftBackupProvider.select((state) => state.iCloudDownloadProgress));
final errorCount = ref.watch(driftBackupProvider.select((state) => state.errorCount));
final isProcessing = uploadTasks.isNotEmpty || isSyncing || iCloudProgress.isNotEmpty;
return AnimatedBuilder(
@@ -149,6 +151,14 @@ class BackupToggleButtonState extends ConsumerState<BackupToggleButton> with Sin
),
],
),
if (errorCount > 0)
Padding(
padding: const EdgeInsets.only(top: 2),
child: Text(
"upload_error_with_count".t(context: context, args: {'count': '$errorCount'}),
style: context.textTheme.labelMedium?.copyWith(color: context.colorScheme.error),
),
),
],
),
),

View File

@@ -149,6 +149,8 @@ class DriftBackupState {
);
}
int get errorCount => uploadItems.values.where((item) => item.isFailed == true).length;
@override
String toString() {
return 'DriftBackupState(totalCount: $totalCount, backupCount: $backupCount, remainderCount: $remainderCount, processingCount: $processingCount, isSyncing: $isSyncing, error: $error, uploadItems: $uploadItems, cancelToken: $cancelToken, iCloudDownloadProgress: $iCloudDownloadProgress)';

View File

@@ -260,6 +260,7 @@ class BackgroundUploadService {
Future<UploadTask?> getUploadTask(LocalAsset asset, {String group = kBackupGroup, int? priority}) async {
final entity = await _storageRepository.getAssetEntityForAsset(asset);
if (entity == null) {
_logger.warning("Asset entity not found for ${asset.id} - ${asset.name}");
return null;
}
@@ -282,6 +283,7 @@ class BackgroundUploadService {
}
if (file == null) {
_logger.warning("Failed to get file for asset ${asset.id} - ${asset.name}");
return null;
}

View File

@@ -10,6 +10,7 @@ import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/extensions/network_capability_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/backup.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
import 'package:immich_mobile/platform/connectivity_api.g.dart';
@@ -266,6 +267,10 @@ class ForegroundUploadService {
try {
final entity = await _storageRepository.getAssetEntityForAsset(asset);
if (entity == null) {
callbacks.onError?.call(
asset.localId!,
CurrentPlatform.isAndroid ? "asset_not_found_on_device_android".t() : "asset_not_found_on_device_ios".t(),
);
return;
}
@@ -298,6 +303,11 @@ class ForegroundUploadService {
// Get files locally
file = await _storageRepository.getFileForAsset(asset.id);
if (file == null) {
_logger.warning("Failed to get file ${asset.id} - ${asset.name}");
callbacks.onError?.call(
asset.localId!,
CurrentPlatform.isAndroid ? "asset_not_found_on_device_android".t() : "asset_not_found_on_device_ios".t(),
);
return;
}
@@ -306,12 +316,17 @@ class ForegroundUploadService {
livePhotoFile = await _storageRepository.getMotionFileForAsset(asset);
if (livePhotoFile == null) {
_logger.warning("Failed to obtain motion part of the livePhoto - ${asset.name}");
callbacks.onError?.call(
asset.localId!,
CurrentPlatform.isAndroid ? "asset_not_found_on_device_android".t() : "asset_not_found_on_device_ios".t(),
);
}
}
}
if (file == null) {
_logger.warning("Failed to obtain file for asset ${asset.id} - ${asset.name}");
_logger.warning("Failed to obtain file from iCloud for asset ${asset.id} - ${asset.name}");
callbacks.onError?.call(asset.localId!, "asset_not_found_on_icloud".t());
return;
}

View File

@@ -622,3 +622,98 @@ from
where
"asset"."id" = $1
and "asset"."type" = $2
-- AssetRepository.getForOcr
select
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_edit"."action",
"asset_edit"."parameters"
from
"asset_edit"
where
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits",
"asset_exif"."exifImageWidth",
"asset_exif"."exifImageHeight",
"asset_exif"."orientation"
from
"asset"
inner join "asset_exif" on "asset_exif"."assetId" = "asset"."id"
where
"asset"."id" = $1
-- AssetRepository.getForEdit
select
"asset"."type",
"asset"."livePhotoVideoId",
"asset"."originalPath",
"asset"."originalFileName",
"asset_exif"."exifImageWidth",
"asset_exif"."exifImageHeight",
"asset_exif"."orientation",
"asset_exif"."projectionType"
from
"asset"
inner join "asset_exif" on "asset_exif"."assetId" = "asset"."id"
where
"asset"."id" = $1
-- AssetRepository.getForMetadataExtractionTags
select
"asset_exif"."tags"
from
"asset_exif"
where
"asset_exif"."assetId" = $1
-- AssetRepository.getForFaces
select
"asset_exif"."exifImageHeight",
"asset_exif"."exifImageWidth",
"asset_exif"."orientation",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_edit"."action",
"asset_edit"."parameters"
from
"asset_edit"
where
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits"
from
"asset"
inner join "asset_exif" on "asset_exif"."assetId" = "asset"."id"
where
"asset"."id" = $1
-- AssetRepository.getForUpdateTags
select
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"tag"."value"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
where
"asset"."id" = "tag_asset"."assetId"
) as agg
) as "tags"
from
"asset"
where
"asset"."id" = $1

View File

@@ -1,5 +1,6 @@
import { Injectable } from '@nestjs/common';
import { ExpressionBuilder, Insertable, Kysely, NotNull, Selectable, sql, Updateable, UpdateResult } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { isEmpty, isUndefined, omitBy } from 'lodash';
import { InjectKysely } from 'nestjs-kysely';
import { LockableProperty, Stack } from 'src/database';
@@ -1053,4 +1054,68 @@ export class AssetRepository {
.where('asset.type', '=', AssetType.Video)
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
async getForOcr(id: string) {
return this.db
.selectFrom('asset')
.where('asset.id', '=', id)
.select(withEdits)
.innerJoin('asset_exif', (join) => join.onRef('asset_exif.assetId', '=', 'asset.id'))
.select(['asset_exif.exifImageWidth', 'asset_exif.exifImageHeight', 'asset_exif.orientation'])
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
async getForEdit(id: string) {
return this.db
.selectFrom('asset')
.select(['asset.type', 'asset.livePhotoVideoId', 'asset.originalPath', 'asset.originalFileName'])
.where('asset.id', '=', id)
.innerJoin('asset_exif', (join) => join.onRef('asset_exif.assetId', '=', 'asset.id'))
.select([
'asset_exif.exifImageWidth',
'asset_exif.exifImageHeight',
'asset_exif.orientation',
'asset_exif.projectionType',
])
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
async getForMetadataExtractionTags(id: string) {
return this.db
.selectFrom('asset_exif')
.select('asset_exif.tags')
.where('asset_exif.assetId', '=', id)
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
async getForFaces(id: string) {
return this.db
.selectFrom('asset')
.innerJoin('asset_exif', (join) => join.onRef('asset_exif.assetId', '=', 'asset.id'))
.select(['asset_exif.exifImageHeight', 'asset_exif.exifImageWidth', 'asset_exif.orientation'])
.select(withEdits)
.where('asset.id', '=', id)
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [DummyValue.UUID] })
async getForUpdateTags(id: string) {
return this.db
.selectFrom('asset')
.select((eb) =>
jsonArrayFrom(
eb
.selectFrom('tag')
.select('tag.value')
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.whereRef('asset.id', '=', 'tag_asset.assetId'),
).as('tags'),
)
.where('asset.id', '=', id)
.executeTakeFirstOrThrow();
}
}
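
The getForOcr and getForFaces queries above reference a withEdits helper that is not part of this diff. Judging from the "edits" subquery in the generated SQL snapshot earlier, it is presumably something along these lines (a sketch, not the actual implementation):

import { ExpressionBuilder } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
// DB is assumed to be the repository's Kysely schema type, as used elsewhere in this file.

// Hypothetical reconstruction of the withEdits select helper, inferred from the
// generated SQL above; the real helper lives elsewhere in the repo.
const withEdits = (eb: ExpressionBuilder<DB, 'asset'>) =>
  jsonArrayFrom(
    eb
      .selectFrom('asset_edit')
      .select(['asset_edit.action', 'asset_edit.parameters'])
      .whereRef('asset_edit.assetId', '=', 'asset.id'),
  ).as('edits');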

View File

@@ -705,7 +705,7 @@ describe(AssetService.name, () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
mocks.ocr.getByAssetId.mockResolvedValue([ocr1, ocr2]);
mocks.asset.getById.mockResolvedValue(assetStub.image);
mocks.asset.getForOcr.mockResolvedValue({ edits: [], ...factory.exif() });
await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([ocr1, ocr2]);
@@ -720,7 +720,7 @@ describe(AssetService.name, () => {
it('should return empty array when no OCR data exists', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
mocks.ocr.getByAssetId.mockResolvedValue([]);
mocks.asset.getById.mockResolvedValue(assetStub.image);
mocks.asset.getForOcr.mockResolvedValue({ edits: [factory.assetEdit()], ...factory.exif() });
await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([]);
expect(mocks.ocr.getByAssetId).toHaveBeenCalledWith('asset-1');

View File

@@ -401,15 +401,19 @@ export class AssetService extends BaseService {
async getOcr(auth: AuthDto, id: string): Promise<AssetOcrResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] });
const ocr = await this.ocrRepository.getByAssetId(id);
const asset = await this.assetRepository.getById(id, { exifInfo: true, edits: true });
const asset = await this.assetRepository.getForOcr(id);
if (!asset || !asset.exifInfo || !asset.edits) {
if (!asset) {
throw new BadRequestException('Asset not found');
}
const dimensions = getDimensions(asset.exifInfo);
const dimensions = getDimensions({
exifImageHeight: asset.exifImageHeight,
exifImageWidth: asset.exifImageWidth,
orientation: asset.orientation,
});
return ocr.map((item) => transformOcrBoundingBox(item, asset.edits!, dimensions));
return ocr.map((item) => transformOcrBoundingBox(item, asset.edits, dimensions));
}
async upsertBulkMetadata(auth: AuthDto, dto: AssetMetadataBulkUpsertDto): Promise<AssetMetadataBulkResponseDto[]> {
@@ -549,7 +553,7 @@ export class AssetService extends BaseService {
async editAsset(auth: AuthDto, id: string, dto: AssetEditActionListDto): Promise<AssetEditsDto> {
await this.requireAccess({ auth, permission: Permission.AssetEditCreate, ids: [id] });
const asset = await this.assetRepository.getById(id, { exifInfo: true });
const asset = await this.assetRepository.getForEdit(id);
if (!asset) {
throw new BadRequestException('Asset not found');
}
@@ -574,15 +578,21 @@ export class AssetService extends BaseService {
throw new BadRequestException('Editing SVG images is not supported');
}
// check that crop parameters will not go out of bounds
const { width: assetWidth, height: assetHeight } = getDimensions(asset);
if (!assetWidth || !assetHeight) {
throw new BadRequestException('Asset dimensions are not available for editing');
}
const cropIndex = dto.edits.findIndex((e) => e.action === AssetEditAction.Crop);
if (cropIndex > 0) {
throw new BadRequestException('Crop action must be the first edit action');
}
const crop = cropIndex === -1 ? null : (dto.edits[cropIndex] as AssetEditActionCrop);
if (crop) {
// check that crop parameters will not go out of bounds
const { width: assetWidth, height: assetHeight } = getDimensions(asset.exifInfo!);
const { width: assetWidth, height: assetHeight } = getDimensions(asset);
if (!assetWidth || !assetHeight) {
throw new BadRequestException('Asset dimensions are not available for editing');

View File

@@ -387,7 +387,7 @@ describe(MetadataService.name, () => {
it('should extract tags from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent'] }) });
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent'] });
mockReadTags({ TagsList: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -398,7 +398,7 @@ describe(MetadataService.name, () => {
it('should extract hierarchy from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent/Child'] }) });
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent/Child'] });
mockReadTags({ TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -419,7 +419,7 @@ describe(MetadataService.name, () => {
it('should extract tags from Keywords as a string', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent'] }) });
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent'] });
mockReadTags({ Keywords: 'Parent' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -430,7 +430,7 @@ describe(MetadataService.name, () => {
it('should extract tags from Keywords as a list', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent'] }) });
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent'] });
mockReadTags({ Keywords: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -441,10 +441,7 @@ describe(MetadataService.name, () => {
it('should extract tags from Keywords as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent', '2024'] }),
});
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent', '2024'] });
mockReadTags({ Keywords: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -456,7 +453,7 @@ describe(MetadataService.name, () => {
it('should extract hierarchal tags from Keywords', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent/Child'] }) });
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent/Child'] });
mockReadTags({ Keywords: 'Parent/Child' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -476,10 +473,7 @@ describe(MetadataService.name, () => {
it('should ignore Keywords when TagsList is present', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent/Child', 'Child'] }),
});
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent/Child', 'Child'] });
mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -499,10 +493,7 @@ describe(MetadataService.name, () => {
it('should extract hierarchy from HierarchicalSubject', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent/Child', 'TagA'] }),
});
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent/Child', 'TagA'] });
mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -524,10 +515,7 @@ describe(MetadataService.name, () => {
it('should extract tags from HierarchicalSubject as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent', '2024'] }),
});
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent', '2024'] });
mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -539,7 +527,7 @@ describe(MetadataService.name, () => {
it('should extract ignore / characters in a HierarchicalSubject tag', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Mom|Dad'] }) });
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Mom|Dad'] });
mockReadTags({ HierarchicalSubject: ['Mom/Dad'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
@@ -554,10 +542,7 @@ describe(MetadataService.name, () => {
it('should ignore HierarchicalSubject when TagsList is present', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent/Child', 'Parent2/Child2'] }),
});
mocks.asset.getForMetadataExtractionTags.mockResolvedValue({ tags: ['Parent/Child', 'Parent2/Child2'] });
mockReadTags({ HierarchicalSubject: ['Parent2|Child2'], TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

View File

@@ -566,10 +566,10 @@ export class MetadataService extends BaseService {
}
private async applyTagList({ id, ownerId }: { id: string; ownerId: string }) {
const asset = await this.assetRepository.getById(id, { exifInfo: true });
const asset = await this.assetRepository.getForMetadataExtractionTags(id);
const results = await upsertTags(this.tagRepository, {
userId: ownerId,
tags: asset?.exifInfo?.tags ?? [],
tags: asset?.tags ?? [],
});
await this.tagRepository.replaceAssetTags(
id,

View File

@@ -354,7 +354,7 @@ describe(PersonService.name, () => {
it('should get the bounding boxes for an asset', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([faceStub.face1.assetId]));
mocks.person.getFaces.mockResolvedValue([faceStub.primaryFace1]);
mocks.asset.getById.mockResolvedValue(assetStub.image);
mocks.asset.getForFaces.mockResolvedValue({ edits: [], ...factory.exif() });
await expect(sut.getFacesById(authStub.admin, { id: faceStub.face1.assetId })).resolves.toStrictEqual([
mapFaces(faceStub.primaryFace1, authStub.admin),
]);

View File

@@ -127,10 +127,10 @@ export class PersonService extends BaseService {
async getFacesById(auth: AuthDto, dto: FaceDto): Promise<AssetFaceResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [dto.id] });
const faces = await this.personRepository.getFaces(dto.id);
const asset = await this.assetRepository.getById(dto.id, { edits: true, exifInfo: true });
const assetDimensions = getDimensions(asset!.exifInfo!);
const asset = await this.assetRepository.getForFaces(dto.id);
const assetDimensions = getDimensions(asset);
return faces.map((face) => mapFaces(face, auth, asset!.edits!, assetDimensions));
return faces.map((face) => mapFaces(face, auth, asset.edits, assetDimensions));
}
async createNewFeaturePhoto(changeFeaturePhoto: string[]) {

View File

@@ -4,7 +4,6 @@ import { JobStatus } from 'src/enum';
import { TagService } from 'src/services/tag.service';
import { authStub } from 'test/fixtures/auth.stub';
import { tagResponseStub, tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
describe(TagService.name, () => {
@@ -192,10 +191,7 @@ describe(TagService.name, () => {
it('should upsert records', async () => {
mocks.access.tag.checkOwnerAccess.mockResolvedValue(new Set(['tag-1', 'tag-2']));
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2', 'asset-3']));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
tags: [factory.tag({ value: 'tag-1' }), factory.tag({ value: 'tag-2' })],
});
mocks.asset.getForUpdateTags.mockResolvedValue({ tags: [{ value: 'tag-1' }, { value: 'tag-2' }] });
mocks.tag.upsertAssetIds.mockResolvedValue([
{ tagId: 'tag-1', assetId: 'asset-1' },
{ tagId: 'tag-1', assetId: 'asset-2' },
@@ -246,10 +242,7 @@ describe(TagService.name, () => {
mocks.tag.get.mockResolvedValue(tagStub.tag);
mocks.tag.getAssetIds.mockResolvedValue(new Set(['asset-1']));
mocks.tag.addAssetIds.mockResolvedValue();
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
tags: [factory.tag({ value: 'tag-1' })],
});
mocks.asset.getForUpdateTags.mockResolvedValue({ tags: [{ value: 'tag-1' }] });
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-2']));
await expect(
@@ -278,6 +271,7 @@ describe(TagService.name, () => {
it('should throw an error for an invalid id', async () => {
mocks.tag.getAssetIds.mockResolvedValue(new Set());
mocks.tag.removeAssetIds.mockResolvedValue();
mocks.asset.getForUpdateTags.mockResolvedValue({ tags: [] });
await expect(sut.removeAssets(authStub.admin, 'tag-1', { ids: ['asset-1'] })).resolves.toEqual([
{ id: 'asset-1', success: false, error: 'not_found' },
@@ -288,6 +282,7 @@ describe(TagService.name, () => {
mocks.tag.get.mockResolvedValue(tagStub.tag);
mocks.tag.getAssetIds.mockResolvedValue(new Set(['asset-1']));
mocks.tag.removeAssetIds.mockResolvedValue();
mocks.asset.getForUpdateTags.mockResolvedValue({ tags: [] });
await expect(
sut.removeAssets(authStub.admin, 'tag-1', {

View File

@@ -151,10 +151,9 @@ export class TagService extends BaseService {
}
private async updateTags(assetId: string) {
const asset = await this.assetRepository.getById(assetId, { tags: true });
await this.assetRepository.upsertExif(
updateLockedColumns({ assetId, tags: asset?.tags?.map(({ value }) => value) ?? [] }),
{ lockedPropertiesBehavior: 'append' },
);
const { tags } = await this.assetRepository.getForUpdateTags(assetId);
await this.assetRepository.upsertExif(updateLockedColumns({ assetId, tags: tags.map(({ value }) => value) }), {
lockedPropertiesBehavior: 'append',
});
}
}

View File

@@ -1,10 +1,9 @@
import { BadRequestException } from '@nestjs/common';
import { StorageCore } from 'src/cores/storage.core';
import { AssetFile, Exif } from 'src/database';
import { AssetFile } from 'src/database';
import { BulkIdErrorReason, BulkIdResponseDto } from 'src/dtos/asset-ids.response.dto';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ExifResponseDto } from 'src/dtos/exif.dto';
import { AssetFileType, AssetType, AssetVisibility, Permission } from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { AccessRepository } from 'src/repositories/access.repository';
@@ -210,20 +209,26 @@ const isFlipped = (orientation?: string | null) => {
return value && [5, 6, 7, 8, -90, 90].includes(value);
};
export const getDimensions = (exifInfo: ExifResponseDto | Exif) => {
const { exifImageWidth: width, exifImageHeight: height } = exifInfo;
export const getDimensions = ({
exifImageHeight: height,
exifImageWidth: width,
orientation,
}: {
exifImageHeight: number | null;
exifImageWidth: number | null;
orientation: string | null;
}) => {
if (!width || !height) {
return { width: 0, height: 0 };
}
if (isFlipped(exifInfo.orientation)) {
if (isFlipped(orientation)) {
return { width: height, height: width };
}
return { width, height };
};
export const isPanorama = (asset: { exifInfo?: Exif | null; originalFileName: string }) => {
return asset.exifInfo?.projectionType === 'EQUIRECTANGULAR' || asset.originalFileName.toLowerCase().endsWith('.insp');
export const isPanorama = (asset: { projectionType: string | null; originalFileName: string }) => {
return asset.projectionType === 'EQUIRECTANGULAR' || asset.originalFileName.toLowerCase().endsWith('.insp');
};
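
For illustration, the refactored getDimensions needs only the three EXIF columns and still swaps width and height for flipped orientations (expected behavior sketched from the code above, not taken from the diff):

// Orientation "6" (a 90° rotation) is in the isFlipped list, so the dimensions swap.
getDimensions({ exifImageWidth: 4000, exifImageHeight: 3000, orientation: '6' });
// => { width: 3000, height: 4000 }

// Missing dimensions fall back to zero rather than throwing.
getDimensions({ exifImageWidth: null, exifImageHeight: null, orientation: null });
// => { width: 0, height: 0 }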

View File

@@ -1,12 +1,15 @@
import { Kysely } from 'kysely';
import { AssetEditAction } from 'src/dtos/editing.dto';
import { AssetFileType, AssetMetadataKey, JobName, SharedLinkType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StackRepository } from 'src/repositories/stack.repository';
@@ -25,6 +28,7 @@ const setup = (db?: Kysely<DB>) => {
database: db || defaultDatabase,
real: [
AssetRepository,
AssetEditRepository,
AssetJobRepository,
AlbumRepository,
AccessRepository,
@@ -32,7 +36,7 @@ const setup = (db?: Kysely<DB>) => {
StackRepository,
UserRepository,
],
mock: [EventRepository, LoggingRepository, JobRepository, StorageRepository],
mock: [EventRepository, LoggingRepository, JobRepository, StorageRepository, OcrRepository],
});
};
@@ -431,6 +435,57 @@ describe(AssetService.name, () => {
});
});
describe('getOcr', () => {
it('should require access', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { user: user2 } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user2.id });
await expect(sut.getOcr(auth, asset.id)).rejects.toThrow('Not found or no asset.read access');
});
it('should work', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, exifImageHeight: 42, exifImageWidth: 69, orientation: '1' });
ctx.getMock(OcrRepository).getByAssetId.mockResolvedValue([factory.assetOcr()]);
await expect(sut.getOcr(auth, asset.id)).resolves.toEqual([
expect.objectContaining({ x1: 0.1, x2: 0.3, x3: 0.3, x4: 0.1, y1: 0.2, y2: 0.2, y3: 0.4, y4: 0.4 }),
]);
});
it('should apply rotation', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, exifImageHeight: 42, exifImageWidth: 69, orientation: '1' });
await ctx.database
.insertInto('asset_edit')
.values({ assetId: asset.id, action: AssetEditAction.Rotate, parameters: { angle: 90 }, sequence: 1 })
.execute();
ctx.getMock(OcrRepository).getByAssetId.mockResolvedValue([factory.assetOcr()]);
await expect(sut.getOcr(auth, asset.id)).resolves.toEqual([
expect.objectContaining({
x1: 0.6,
x2: 0.8,
x3: 0.8,
x4: 0.6,
y1: expect.any(Number),
y2: expect.any(Number),
y3: 0.3,
y4: 0.3,
}),
]);
});
});
describe('upsertBulkMetadata', () => {
it('should work', async () => {
const { sut, ctx } = setup();
@@ -603,4 +658,38 @@ describe(AssetService.name, () => {
expect(metadata).toEqual([expect.objectContaining({ key: 'some-other-key', value: { foo: 'bar' } })]);
});
});
describe('editAsset', () => {
it('should require access', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { user: user2 } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user2.id });
await expect(
sut.editAsset(auth, asset.id, { edits: [{ action: AssetEditAction.Rotate, parameters: { angle: 90 } }] }),
).rejects.toThrow('Not found or no asset.edit.create access');
});
it('should work', async () => {
const { sut, ctx } = setup();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, exifImageHeight: 42, exifImageWidth: 69, orientation: '1' });
const editAction = { action: AssetEditAction.Rotate, parameters: { angle: 90 } } as const;
await expect(sut.editAsset(auth, asset.id, { edits: [editAction] })).resolves.toEqual({
assetId: asset.id,
edits: [editAction],
});
await expect(ctx.get(AssetRepository).getById(asset.id)).resolves.toEqual(
expect.objectContaining({ isEdited: true }),
);
await expect(ctx.get(AssetEditRepository).getAll(asset.id)).resolves.toEqual([editAction]);
});
});
});

View File

@@ -53,5 +53,10 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getForOriginal: vitest.fn(),
getForThumbnail: vitest.fn(),
getForVideo: vitest.fn(),
getForEdit: vitest.fn(),
getForOcr: vitest.fn(),
getForMetadataExtractionTags: vitest.fn(),
getForFaces: vitest.fn(),
getForUpdateTags: vitest.fn(),
};
};

View File

@@ -60,7 +60,7 @@
const axisOptions: Axis = {
stroke: () => (isDark ? '#ccc' : 'black'),
ticks: {
show: true,
show: false,
stroke: () => (isDark ? '#444' : '#ddd'),
},
grid: {
@@ -116,6 +116,8 @@
axes: [
{
...axisOptions,
size: 40,
ticks: { show: true },
values: (plot, values) => {
return values.map((value) => {
if (!value) {
@@ -125,7 +127,10 @@
});
},
},
axisOptions,
{
...axisOptions,
size: 60,
},
],
};

View File

@@ -0,0 +1,21 @@
import { asUrl } from '$lib/services/shared-link.service';
import type { ServerConfigDto } from '@immich/sdk';
import { sharedLinkFactory } from '@test-data/factories/shared-link-factory';
describe('SharedLinkService', () => {
beforeAll(() => {
vi.mock(import('$lib/managers/server-config-manager.svelte'), () => ({
serverConfigManager: {
value: { externalDomain: 'http://localhost:2283' } as ServerConfigDto,
init: vi.fn(),
loadServerConfig: vi.fn(),
},
}));
});
describe('asUrl', () => {
it('should properly encode characters in slug', () => {
expect(asUrl(sharedLinkFactory.build({ slug: 'foo/bar' }))).toBe('http://localhost:2283/s/foo%2Fbar');
});
});
});

View File

@@ -60,8 +60,10 @@ export const getSharedLinkActions = ($t: MessageFormatter, sharedLink: SharedLin
return { Edit, Delete, Copy, ViewQrCode };
};
const asUrl = (sharedLink: SharedLinkResponseDto) => {
const path = sharedLink.slug ? `s/${sharedLink.slug}` : `share/${sharedLink.key}`;
export const asUrl = (sharedLink: SharedLinkResponseDto) => {
const path = sharedLink.slug
? `s/${encodeURIComponent(sharedLink.slug)}`
: `share/${encodeURIComponent(sharedLink.key)}`;
return new URL(path, serverConfigManager.value.externalDomain || globalThis.location.origin).href;
};
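
A quick sanity check of the new encoding behavior, with illustrative values matching the test above:

// Without encodeURIComponent, a slug like "foo/bar" would be split into two path
// segments by the URL constructor; encoding keeps it as a single segment.
encodeURIComponent('foo/bar'); // => 'foo%2Fbar'
new URL(`s/${encodeURIComponent('foo/bar')}`, 'http://localhost:2283').href;
// => 'http://localhost:2283/s/foo%2Fbar'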