Mirror of https://github.com/immich-app/immich.git, synced 2025-12-06 04:41:40 -08:00

Compare commits: 7 commits, popup-menu ... renovate/f
| Author | SHA1 | Date |
|---|---|---|
|  | fb4e061d09 |  |
|  | 4f93eda8d8 |  |
|  | f5df5fa98d |  |
|  | f07d1441ea |  |
|  | 1bcf28c062 |  |
|  | 62628dfcfa |  |
|  | b11aecd184 |  |
.github/workflows/cli.yml (vendored, 2 changes)

@@ -105,7 +105,7 @@ jobs:
       - name: Generate docker image tags
         id: metadata
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
         with:
           flavor: |
             latest=false
.github/workflows/close-duplicates.yml (vendored, 2 changes)

@@ -35,7 +35,7 @@ jobs:
     needs: [get_body, should_run]
     if: ${{ needs.should_run.outputs.should_run == 'true' }}
     container:
-      image: ghcr.io/immich-app/mdq:main@sha256:73a05fc805dfd3bd29bebc08442aedfec5c419c5ad3421ec73edc5647233891a
+      image: ghcr.io/immich-app/mdq:main@sha256:237cdae7783609c96f18037a513d38088713cf4a2e493a3aa136d0c45490749a
     outputs:
       checked: ${{ steps.get_checkbox.outputs.checked }}
     steps:
.github/workflows/codeql-analysis.yml (vendored, 6 changes)

@@ -57,7 +57,7 @@ jobs:

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
@@ -70,7 +70,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/autobuild@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5

       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -83,6 +83,6 @@ jobs:
       #   ./location_of_script_within_repo/buildscript.sh

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
         with:
           category: '/language:${{matrix.language}}'
.github/workflows/test.yml (vendored, 2 changes)

@@ -572,7 +572,7 @@ jobs:
           token: ${{ steps.token.outputs.token }}
       - name: Install uv
         uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        # TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
        # with:
        #   python-version: 3.11
@@ -1006,7 +1006,7 @@ describe('/libraries', () => {
     rmSync(`${testAssetDir}/temp/xmp`, { recursive: true, force: true });
   });

-  it('should switch from using file metadata to file.xmp metadata when asset refreshes', async () => {
+  it('should switch from using file metadata to file.ext.xmp metadata when asset refreshes', async () => {
     const library = await utils.createLibrary(admin.accessToken, {
       ownerId: admin.userId,
       importPaths: [`${testAssetDirInternal}/temp/xmp`],
@@ -2,8 +2,8 @@ experimental_monorepo_root = true

 [tools]
 node = "24.11.1"
-flutter = "3.35.7"
-pnpm = "10.22.0"
+flutter = "3.38.3"
+pnpm = "10.24.0"
 terragrunt = "0.93.10"
 opentofu = "1.10.7"
 java = "25.0.1"
@@ -1,5 +1,5 @@
 import 'package:immich_mobile/entities/asset.entity.dart';
-import 'package:timezone/timezone.dart';
+import 'package:immich_mobile/utils/timezone.dart';

 extension TZExtension on Asset {
   /// Returns the created time of the asset from the exif info (if available) or from
@@ -7,24 +7,11 @@ extension TZExtension on Asset {
   /// the timezone offset in [Duration]
   (DateTime, Duration) getTZAdjustedTimeAndOffset() {
     DateTime dt = fileCreatedAt.toLocal();

     if (exifInfo?.dateTimeOriginal != null) {
-      dt = exifInfo!.dateTimeOriginal!;
-      if (exifInfo?.timeZone != null) {
-        dt = dt.toUtc();
-        try {
-          final location = getLocation(exifInfo!.timeZone!);
-          dt = TZDateTime.from(dt, location);
-        } on LocationNotFoundException {
-          RegExp re = RegExp(r'^utc(?:([+-]\d{1,2})(?::(\d{2}))?)?$', caseSensitive: false);
-          final m = re.firstMatch(exifInfo!.timeZone!);
-          if (m != null) {
-            final duration = Duration(hours: int.parse(m.group(1) ?? '0'), minutes: int.parse(m.group(2) ?? '0'));
-            dt = dt.add(duration);
-            return (dt, duration);
-          }
-        }
-      }
+      return applyTimezoneOffset(dateTime: exifInfo!.dateTimeOriginal!, timeZone: exifInfo?.timeZone);
     }

     return (dt, dt.timeZoneOffset);
   }
 }
@@ -10,6 +10,7 @@ import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
 import 'package:immich_mobile/domain/models/exif.model.dart';
 import 'package:immich_mobile/domain/models/setting.model.dart';
 import 'package:immich_mobile/extensions/build_context_extensions.dart';
+import 'package:immich_mobile/extensions/duration_extensions.dart';
 import 'package:immich_mobile/extensions/translate_extensions.dart';
 import 'package:immich_mobile/presentation/widgets/album/album_tile.dart';
 import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
@@ -29,6 +30,7 @@ import 'package:immich_mobile/repositories/asset_media.repository.dart';
 import 'package:immich_mobile/routing/router.dart';
 import 'package:immich_mobile/utils/action_button.utils.dart';
 import 'package:immich_mobile/utils/bytes_units.dart';
+import 'package:immich_mobile/utils/timezone.dart';
 import 'package:immich_mobile/widgets/common/immich_toast.dart';

 const _kSeparator = ' • ';
@@ -85,13 +87,21 @@ class AssetDetailBottomSheet extends ConsumerWidget {
 class _AssetDetailBottomSheet extends ConsumerWidget {
   const _AssetDetailBottomSheet();

-  String _getDateTime(BuildContext ctx, BaseAsset asset) {
-    final dateTime = asset.createdAt.toLocal();
+  String _getDateTime(BuildContext ctx, BaseAsset asset, ExifInfo? exifInfo) {
+    DateTime dateTime = asset.createdAt.toLocal();
+    Duration timeZoneOffset = dateTime.timeZoneOffset;
+
+    // Use EXIF timezone information if available (matching web app behavior)
+    if (exifInfo?.dateTimeOriginal != null) {
+      (dateTime, timeZoneOffset) = applyTimezoneOffset(
+        dateTime: exifInfo!.dateTimeOriginal!,
+        timeZone: exifInfo.timeZone,
+      );
+    }

     final date = DateFormat.yMMMEd(ctx.locale.toLanguageTag()).format(dateTime);
     final time = DateFormat.jm(ctx.locale.toLanguageTag()).format(dateTime);
-    final timezone = dateTime.timeZoneOffset.isNegative
-        ? 'UTC-${dateTime.timeZoneOffset.inHours.abs().toString().padLeft(2, '0')}:${(dateTime.timeZoneOffset.inMinutes.abs() % 60).toString().padLeft(2, '0')}'
-        : 'UTC+${dateTime.timeZoneOffset.inHours.toString().padLeft(2, '0')}:${(dateTime.timeZoneOffset.inMinutes.abs() % 60).toString().padLeft(2, '0')}';
+    final timezone = 'GMT${timeZoneOffset.formatAsOffset()}';
     return '$date$_kSeparator$time $timezone';
   }
@@ -269,7 +279,7 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
       children: [
         // Asset Date and Time
         SheetTile(
-          title: _getDateTime(context, asset),
+          title: _getDateTime(context, asset, exifInfo),
           titleStyle: context.textTheme.bodyMedium?.copyWith(fontWeight: FontWeight.w600),
           trailing: asset.hasRemote && isOwner ? const Icon(Icons.edit, size: 18) : null,
           onTap: asset.hasRemote && isOwner ? () async => await _editDateTime(context, ref) : null,
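The `formatAsOffset()` call above comes from the newly imported `duration_extensions.dart`, which this diff does not show. A minimal sketch of what such an extension could look like, assuming it only needs to produce strings like `+08:00` for the `GMT` prefix:

```dart
// Hypothetical sketch; the real formatAsOffset() in duration_extensions.dart
// is not part of this diff and may differ.
extension DurationOffsetFormat on Duration {
  String formatAsOffset() {
    final sign = isNegative ? '-' : '+';
    final hours = inHours.abs().toString().padLeft(2, '0');
    final minutes = (inMinutes.abs() % 60).toString().padLeft(2, '0');
    return '$sign$hours:$minutes'; // e.g. +08:00, -05:30
  }
}
```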
@@ -15,6 +15,7 @@ import 'package:immich_mobile/repositories/asset_media.repository.dart';
 import 'package:immich_mobile/repositories/download.repository.dart';
 import 'package:immich_mobile/repositories/drift_album_api_repository.dart';
 import 'package:immich_mobile/routing/router.dart';
+import 'package:immich_mobile/utils/timezone.dart';
 import 'package:immich_mobile/widgets/common/date_time_picker.dart';
 import 'package:immich_mobile/widgets/common/location_picker.dart';
 import 'package:maplibre_gl/maplibre_gl.dart' as maplibre;
@@ -175,9 +176,17 @@ class ActionService {
     }

     final exifData = await _remoteAssetRepository.getExif(assetId);
-    initialDate = asset.createdAt.toLocal();
-    offset = initialDate.timeZoneOffset;
-    timeZone = exifData?.timeZone;
+
+    // Use EXIF timezone information if available (matching web app and display behavior)
+    DateTime dt = asset.createdAt.toLocal();
+    offset = dt.timeZoneOffset;
+
+    if (exifData?.dateTimeOriginal != null) {
+      timeZone = exifData!.timeZone;
+      (dt, offset) = applyTimezoneOffset(dateTime: exifData.dateTimeOriginal!, timeZone: exifData.timeZone);
+    }
+
+    initialDate = dt;
   }

   final dateTime = await showDateTimePicker(
mobile/lib/utils/timezone.dart (new file, 35 lines)

@@ -0,0 +1,35 @@
import 'package:timezone/timezone.dart';

/// Applies timezone conversion to a DateTime using EXIF timezone information.
///
/// This function handles two timezone formats:
/// 1. Named timezone locations (e.g., "Asia/Hong_Kong")
/// 2. UTC offset format (e.g., "UTC+08:00", "UTC-05:00")
///
/// Returns a tuple of (adjusted DateTime, timezone offset Duration)
(DateTime, Duration) applyTimezoneOffset({required DateTime dateTime, required String? timeZone}) {
  DateTime dt = dateTime.toUtc();

  if (timeZone == null) {
    return (dt, dt.timeZoneOffset);
  }

  try {
    // Try to get timezone location from database
    final location = getLocation(timeZone);
    dt = TZDateTime.from(dt, location);
    return (dt, dt.timeZoneOffset);
  } on LocationNotFoundException {
    // Handle UTC offset format (e.g., "UTC+08:00")
    RegExp re = RegExp(r'^utc(?:([+-]\d{1,2})(?::(\d{2}))?)?$', caseSensitive: false);
    final m = re.firstMatch(timeZone);
    if (m != null) {
      final duration = Duration(hours: int.parse(m.group(1) ?? '0'), minutes: int.parse(m.group(2) ?? '0'));
      dt = dt.add(duration);
      return (dt, duration);
    }
  }

  // If timezone is invalid, return UTC
  return (dt, dt.timeZoneOffset);
}
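A short usage sketch of the new helper, mirroring the behavior exercised by the test file further below (the `initializeTimeZones()` call is required before named locations resolve):

```dart
import 'package:immich_mobile/utils/timezone.dart';
import 'package:timezone/data/latest.dart' as tz;

void main() {
  tz.initializeTimeZones(); // load the timezone database first

  final shot = DateTime.utc(2024, 6, 15, 12, 0, 0);

  // Named location: resolved via getLocation(), DST-aware.
  final (hkTime, hkOffset) = applyTimezoneOffset(dateTime: shot, timeZone: 'Asia/Hong_Kong');
  // hkTime is 20:00 local, hkOffset is +08:00

  // Offset string: handled by the regex fallback branch.
  final (offTime, offOffset) = applyTimezoneOffset(dateTime: shot, timeZone: 'UTC+08:00');
  // same wall-clock result as above
}
```

Note the asymmetry between the two branches: the named-location branch returns a `TZDateTime` whose `timeZoneOffset` reflects the location at that instant (including DST), while the offset branch simply shifts the UTC instant and returns the parsed duration.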
@@ -14,7 +14,7 @@ class WorkflowActionItemDto {
   /// Returns a new [WorkflowActionItemDto] instance.
   WorkflowActionItemDto({
     this.actionConfig,
-    required this.actionId,
+    required this.pluginActionId,
   });

@@ -25,21 +25,21 @@ class WorkflowActionItemDto {
   ///
   Object? actionConfig;

-  String actionId;
+  String pluginActionId;

   @override
   bool operator ==(Object other) => identical(this, other) || other is WorkflowActionItemDto &&
     other.actionConfig == actionConfig &&
-    other.actionId == actionId;
+    other.pluginActionId == pluginActionId;

   @override
   int get hashCode =>
     // ignore: unnecessary_parenthesis
     (actionConfig == null ? 0 : actionConfig!.hashCode) +
-    (actionId.hashCode);
+    (pluginActionId.hashCode);

   @override
-  String toString() => 'WorkflowActionItemDto[actionConfig=$actionConfig, actionId=$actionId]';
+  String toString() => 'WorkflowActionItemDto[actionConfig=$actionConfig, pluginActionId=$pluginActionId]';

   Map<String, dynamic> toJson() {
     final json = <String, dynamic>{};
@@ -48,7 +48,7 @@ class WorkflowActionItemDto {
     } else {
       // json[r'actionConfig'] = null;
     }
-    json[r'actionId'] = this.actionId;
+    json[r'pluginActionId'] = this.pluginActionId;
     return json;
   }

@@ -62,7 +62,7 @@ class WorkflowActionItemDto {

     return WorkflowActionItemDto(
       actionConfig: mapValueOfType<Object>(json, r'actionConfig'),
-      actionId: mapValueOfType<String>(json, r'actionId')!,
+      pluginActionId: mapValueOfType<String>(json, r'pluginActionId')!,
     );
   }
   return null;
@@ -110,7 +110,7 @@ class WorkflowActionItemDto {

   /// The list of required keys that must be present in a JSON.
   static const requiredKeys = <String>{
-    'actionId',
+    'pluginActionId',
   };
 }
@@ -14,41 +14,41 @@ class WorkflowActionResponseDto {
   /// Returns a new [WorkflowActionResponseDto] instance.
   WorkflowActionResponseDto({
     required this.actionConfig,
-    required this.actionId,
     required this.id,
     required this.order,
+    required this.pluginActionId,
     required this.workflowId,
   });

   Object? actionConfig;

-  String actionId;
-
   String id;

   num order;

+  String pluginActionId;
+
   String workflowId;

   @override
   bool operator ==(Object other) => identical(this, other) || other is WorkflowActionResponseDto &&
     other.actionConfig == actionConfig &&
-    other.actionId == actionId &&
     other.id == id &&
     other.order == order &&
+    other.pluginActionId == pluginActionId &&
     other.workflowId == workflowId;

   @override
   int get hashCode =>
     // ignore: unnecessary_parenthesis
     (actionConfig == null ? 0 : actionConfig!.hashCode) +
-    (actionId.hashCode) +
     (id.hashCode) +
     (order.hashCode) +
+    (pluginActionId.hashCode) +
     (workflowId.hashCode);

   @override
-  String toString() => 'WorkflowActionResponseDto[actionConfig=$actionConfig, actionId=$actionId, id=$id, order=$order, workflowId=$workflowId]';
+  String toString() => 'WorkflowActionResponseDto[actionConfig=$actionConfig, id=$id, order=$order, pluginActionId=$pluginActionId, workflowId=$workflowId]';

   Map<String, dynamic> toJson() {
     final json = <String, dynamic>{};
@@ -57,9 +57,9 @@ class WorkflowActionResponseDto {
     } else {
       // json[r'actionConfig'] = null;
     }
-    json[r'actionId'] = this.actionId;
     json[r'id'] = this.id;
     json[r'order'] = this.order;
+    json[r'pluginActionId'] = this.pluginActionId;
     json[r'workflowId'] = this.workflowId;
     return json;
   }
@@ -74,9 +74,9 @@ class WorkflowActionResponseDto {

     return WorkflowActionResponseDto(
       actionConfig: mapValueOfType<Object>(json, r'actionConfig'),
-      actionId: mapValueOfType<String>(json, r'actionId')!,
       id: mapValueOfType<String>(json, r'id')!,
       order: num.parse('${json[r'order']}'),
+      pluginActionId: mapValueOfType<String>(json, r'pluginActionId')!,
       workflowId: mapValueOfType<String>(json, r'workflowId')!,
     );
   }
@@ -126,9 +126,9 @@ class WorkflowActionResponseDto {
   /// The list of required keys that must be present in a JSON.
   static const requiredKeys = <String>{
     'actionConfig',
-    'actionId',
     'id',
     'order',
+    'pluginActionId',
     'workflowId',
   };
 }
@@ -14,7 +14,7 @@ class WorkflowFilterItemDto {
   /// Returns a new [WorkflowFilterItemDto] instance.
   WorkflowFilterItemDto({
     this.filterConfig,
-    required this.filterId,
+    required this.pluginFilterId,
   });

@@ -25,21 +25,21 @@ class WorkflowFilterItemDto {
   ///
   Object? filterConfig;

-  String filterId;
+  String pluginFilterId;

   @override
   bool operator ==(Object other) => identical(this, other) || other is WorkflowFilterItemDto &&
     other.filterConfig == filterConfig &&
-    other.filterId == filterId;
+    other.pluginFilterId == pluginFilterId;

   @override
   int get hashCode =>
     // ignore: unnecessary_parenthesis
     (filterConfig == null ? 0 : filterConfig!.hashCode) +
-    (filterId.hashCode);
+    (pluginFilterId.hashCode);

   @override
-  String toString() => 'WorkflowFilterItemDto[filterConfig=$filterConfig, filterId=$filterId]';
+  String toString() => 'WorkflowFilterItemDto[filterConfig=$filterConfig, pluginFilterId=$pluginFilterId]';

   Map<String, dynamic> toJson() {
     final json = <String, dynamic>{};
@@ -48,7 +48,7 @@ class WorkflowFilterItemDto {
     } else {
       // json[r'filterConfig'] = null;
     }
-    json[r'filterId'] = this.filterId;
+    json[r'pluginFilterId'] = this.pluginFilterId;
     return json;
   }

@@ -62,7 +62,7 @@ class WorkflowFilterItemDto {

     return WorkflowFilterItemDto(
       filterConfig: mapValueOfType<Object>(json, r'filterConfig'),
-      filterId: mapValueOfType<String>(json, r'filterId')!,
+      pluginFilterId: mapValueOfType<String>(json, r'pluginFilterId')!,
     );
   }
   return null;
@@ -110,7 +110,7 @@ class WorkflowFilterItemDto {

   /// The list of required keys that must be present in a JSON.
   static const requiredKeys = <String>{
-    'filterId',
+    'pluginFilterId',
   };
 }
@@ -14,41 +14,41 @@ class WorkflowFilterResponseDto {
   /// Returns a new [WorkflowFilterResponseDto] instance.
   WorkflowFilterResponseDto({
     required this.filterConfig,
-    required this.filterId,
     required this.id,
     required this.order,
+    required this.pluginFilterId,
     required this.workflowId,
   });

   Object? filterConfig;

-  String filterId;
-
   String id;

   num order;

+  String pluginFilterId;
+
   String workflowId;

   @override
   bool operator ==(Object other) => identical(this, other) || other is WorkflowFilterResponseDto &&
     other.filterConfig == filterConfig &&
-    other.filterId == filterId &&
     other.id == id &&
     other.order == order &&
+    other.pluginFilterId == pluginFilterId &&
     other.workflowId == workflowId;

   @override
   int get hashCode =>
     // ignore: unnecessary_parenthesis
     (filterConfig == null ? 0 : filterConfig!.hashCode) +
-    (filterId.hashCode) +
     (id.hashCode) +
     (order.hashCode) +
+    (pluginFilterId.hashCode) +
     (workflowId.hashCode);

   @override
-  String toString() => 'WorkflowFilterResponseDto[filterConfig=$filterConfig, filterId=$filterId, id=$id, order=$order, workflowId=$workflowId]';
+  String toString() => 'WorkflowFilterResponseDto[filterConfig=$filterConfig, id=$id, order=$order, pluginFilterId=$pluginFilterId, workflowId=$workflowId]';

   Map<String, dynamic> toJson() {
     final json = <String, dynamic>{};
@@ -57,9 +57,9 @@ class WorkflowFilterResponseDto {
     } else {
       // json[r'filterConfig'] = null;
     }
-    json[r'filterId'] = this.filterId;
     json[r'id'] = this.id;
     json[r'order'] = this.order;
+    json[r'pluginFilterId'] = this.pluginFilterId;
     json[r'workflowId'] = this.workflowId;
     return json;
   }
@@ -74,9 +74,9 @@ class WorkflowFilterResponseDto {

     return WorkflowFilterResponseDto(
       filterConfig: mapValueOfType<Object>(json, r'filterConfig'),
-      filterId: mapValueOfType<String>(json, r'filterId')!,
       id: mapValueOfType<String>(json, r'id')!,
       order: num.parse('${json[r'order']}'),
+      pluginFilterId: mapValueOfType<String>(json, r'pluginFilterId')!,
       workflowId: mapValueOfType<String>(json, r'workflowId')!,
     );
   }
@@ -126,9 +126,9 @@ class WorkflowFilterResponseDto {
   /// The list of required keys that must be present in a JSON.
   static const requiredKeys = <String>{
     'filterConfig',
-    'filterId',
     'id',
     'order',
+    'pluginFilterId',
     'workflowId',
   };
 }
mobile/test/utils/timezone_test.dart (new file, 278 lines)

@@ -0,0 +1,278 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:immich_mobile/utils/timezone.dart';
import 'package:timezone/data/latest.dart' as tz;

void main() {
  setUpAll(() {
    tz.initializeTimeZones();
  });

  group('applyTimezoneOffset', () {
    group('with named timezone locations', () {
      test('should convert UTC to Asia/Hong_Kong (+08:00)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Asia/Hong_Kong',
        );

        expect(adjustedTime.hour, 20); // 12:00 UTC + 8 hours = 20:00
        expect(offset, const Duration(hours: 8));
      });

      test('should convert UTC to America/New_York (handles DST)', () {
        // Summer time (EDT = UTC-4)
        final summerUtc = DateTime.utc(2024, 6, 15, 12, 0, 0);
        final (summerTime, summerOffset) = applyTimezoneOffset(
          dateTime: summerUtc,
          timeZone: 'America/New_York',
        );

        expect(summerTime.hour, 8); // 12:00 UTC - 4 hours = 08:00
        expect(summerOffset, const Duration(hours: -4));

        // Winter time (EST = UTC-5)
        final winterUtc = DateTime.utc(2024, 1, 15, 12, 0, 0);
        final (winterTime, winterOffset) = applyTimezoneOffset(
          dateTime: winterUtc,
          timeZone: 'America/New_York',
        );

        expect(winterTime.hour, 7); // 12:00 UTC - 5 hours = 07:00
        expect(winterOffset, const Duration(hours: -5));
      });

      test('should convert UTC to Europe/London', () {
        // Winter (GMT = UTC+0)
        final winterUtc = DateTime.utc(2024, 1, 15, 12, 0, 0);
        final (winterTime, winterOffset) = applyTimezoneOffset(
          dateTime: winterUtc,
          timeZone: 'Europe/London',
        );

        expect(winterTime.hour, 12);
        expect(winterOffset, Duration.zero);

        // Summer (BST = UTC+1)
        final summerUtc = DateTime.utc(2024, 6, 15, 12, 0, 0);
        final (summerTime, summerOffset) = applyTimezoneOffset(
          dateTime: summerUtc,
          timeZone: 'Europe/London',
        );

        expect(summerTime.hour, 13);
        expect(summerOffset, const Duration(hours: 1));
      });

      test('should handle timezone with 30-minute offset (Asia/Kolkata)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Asia/Kolkata',
        );

        expect(adjustedTime.hour, 17);
        expect(adjustedTime.minute, 30); // 12:00 UTC + 5:30 = 17:30
        expect(offset, const Duration(hours: 5, minutes: 30));
      });

      test('should handle timezone with 45-minute offset (Asia/Kathmandu)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Asia/Kathmandu',
        );

        expect(adjustedTime.hour, 17);
        expect(adjustedTime.minute, 45); // 12:00 UTC + 5:45 = 17:45
        expect(offset, const Duration(hours: 5, minutes: 45));
      });
    });

    group('with UTC offset format', () {
      test('should handle UTC+08:00 format', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC+08:00',
        );

        expect(adjustedTime.hour, 20);
        expect(offset, const Duration(hours: 8));
      });

      test('should handle UTC-05:00 format', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC-05:00',
        );

        expect(adjustedTime.hour, 7);
        expect(offset, const Duration(hours: -5));
      });

      test('should handle UTC+8 format (without minutes)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC+8',
        );

        expect(adjustedTime.hour, 20);
        expect(offset, const Duration(hours: 8));
      });

      test('should handle UTC-5 format (without minutes)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC-5',
        );

        expect(adjustedTime.hour, 7);
        expect(offset, const Duration(hours: -5));
      });

      test('should handle plain UTC format', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC',
        );

        expect(adjustedTime.hour, 12);
        expect(offset, Duration.zero);
      });

      test('should handle lowercase utc format', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'utc+08:00',
        );

        expect(adjustedTime.hour, 20);
        expect(offset, const Duration(hours: 8));
      });

      test('should handle UTC+05:30 format (with minutes)', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC+05:30',
        );

        expect(adjustedTime.hour, 17);
        expect(adjustedTime.minute, 30);
        expect(offset, const Duration(hours: 5, minutes: 30));
      });
    });

    group('with null or invalid timezone', () {
      test('should return UTC time when timezone is null', () {
        final localTime = DateTime(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: localTime,
          timeZone: null,
        );

        expect(adjustedTime.isUtc, true);
        expect(offset, adjustedTime.timeZoneOffset);
      });

      test('should return UTC time when timezone is invalid', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Invalid/Timezone',
        );

        expect(adjustedTime.isUtc, true);
        expect(adjustedTime.hour, 12);
        expect(offset, adjustedTime.timeZoneOffset);
      });

      test('should return UTC time when UTC offset format is malformed', () {
        final utcTime = DateTime.utc(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'UTC++08',
        );

        expect(adjustedTime.isUtc, true);
        expect(adjustedTime.hour, 12);
      });
    });

    group('edge cases', () {
      test('should handle date crossing midnight forward', () {
        final utcTime = DateTime.utc(2024, 6, 15, 20, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'Asia/Tokyo', // UTC+9
        );

        expect(adjustedTime.day, 16); // Crosses to next day
        expect(adjustedTime.hour, 5); // 20:00 UTC + 9 = 05:00 next day
        expect(offset, const Duration(hours: 9));
      });

      test('should handle date crossing midnight backward', () {
        final utcTime = DateTime.utc(2024, 6, 15, 3, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'America/Los_Angeles', // UTC-7 in summer
        );

        expect(adjustedTime.day, 14); // Crosses to previous day
        expect(adjustedTime.hour, 20); // 03:00 UTC - 7 = 20:00 previous day
        expect(offset, const Duration(hours: -7));
      });

      test('should handle year boundary crossing', () {
        final utcTime = DateTime.utc(2024, 1, 1, 2, 0, 0);

        final (adjustedTime, offset) = applyTimezoneOffset(
          dateTime: utcTime,
          timeZone: 'America/New_York', // UTC-5 in winter
        );

        expect(adjustedTime.year, 2023);
        expect(adjustedTime.month, 12);
        expect(adjustedTime.day, 31);
        expect(adjustedTime.hour, 21); // 02:00 UTC - 5 = 21:00 Dec 31
      });

      test('should convert local time to UTC before applying timezone', () {
        // Create a local time (not UTC)
        final localTime = DateTime(2024, 6, 15, 12, 0, 0);

        final (adjustedTime, _) = applyTimezoneOffset(
          dateTime: localTime,
          timeZone: 'Asia/Hong_Kong',
        );

        // The function converts to UTC first, then applies timezone
        // So local 12:00 -> UTC (depends on local timezone) -> HK time
        // We can verify it's working by checking it's a TZDateTime
        expect(adjustedTime, isNotNull);
      });
    });
  });
}
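Assuming the standard Flutter toolchain, the new suite should run from the `mobile` directory with `flutter test test/utils/timezone_test.dart`.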
@@ -23162,13 +23162,13 @@
         "actionConfig": {
           "type": "object"
         },
-        "actionId": {
+        "pluginActionId": {
           "format": "uuid",
           "type": "string"
         }
       },
       "required": [
-        "actionId"
+        "pluginActionId"
       ],
       "type": "object"
     },
@@ -23178,24 +23178,24 @@
         "nullable": true,
         "type": "object"
       },
-      "actionId": {
-        "type": "string"
-      },
       "id": {
         "type": "string"
       },
       "order": {
         "type": "number"
       },
+      "pluginActionId": {
+        "type": "string"
+      },
       "workflowId": {
         "type": "string"
       }
     },
     "required": [
       "actionConfig",
-      "actionId",
       "id",
       "order",
+      "pluginActionId",
       "workflowId"
     ],
     "type": "object"
@@ -23244,13 +23244,13 @@
         "filterConfig": {
           "type": "object"
         },
-        "filterId": {
+        "pluginFilterId": {
           "format": "uuid",
           "type": "string"
         }
       },
       "required": [
-        "filterId"
+        "pluginFilterId"
       ],
       "type": "object"
     },
@@ -23260,24 +23260,24 @@
         "nullable": true,
         "type": "object"
       },
-      "filterId": {
-        "type": "string"
-      },
       "id": {
         "type": "string"
      },
      "order": {
        "type": "number"
      },
+      "pluginFilterId": {
+        "type": "string"
+      },
      "workflowId": {
        "type": "string"
      }
    },
    "required": [
      "filterConfig",
-      "filterId",
      "id",
      "order",
+      "pluginFilterId",
      "workflowId"
    ],
    "type": "object"
@@ -1729,16 +1729,16 @@ export type CreateProfileImageResponseDto = {
 };
 export type WorkflowActionResponseDto = {
     actionConfig: object | null;
-    actionId: string;
     id: string;
     order: number;
+    pluginActionId: string;
     workflowId: string;
 };
 export type WorkflowFilterResponseDto = {
     filterConfig: object | null;
-    filterId: string;
     id: string;
     order: number;
+    pluginFilterId: string;
     workflowId: string;
 };
 export type WorkflowResponseDto = {
@@ -1754,11 +1754,11 @@ export type WorkflowResponseDto = {
 };
 export type WorkflowActionItemDto = {
     actionConfig?: object;
-    actionId: string;
+    pluginActionId: string;
 };
 export type WorkflowFilterItemDto = {
     filterConfig?: object;
-    filterId: string;
+    pluginFilterId: string;
 };
 export type WorkflowCreateDto = {
     actions: WorkflowActionItemDto[];
@@ -3,7 +3,7 @@
   "version": "0.0.1",
   "description": "Monorepo for Immich",
   "private": true,
-  "packageManager": "pnpm@10.22.0+sha512.bf049efe995b28f527fd2b41ae0474ce29186f7edcb3bf545087bd61fbbebb2bf75362d1307fda09c2d288e1e499787ac12d4fcb617a974718a6051f2eee741c",
+  "packageManager": "pnpm@10.24.0+sha512.01ff8ae71b4419903b65c60fb2dc9d34cf8bb6e06d03bde112ef38f7a34d6904c424ba66bea5cdcf12890230bf39f9580473140ed9c946fef328b6e5238a345a",
   "engines": {
     "pnpm": ">=10.0.0"
   }
pnpm-lock.yaml (generated, 1472 changes): file diff suppressed because it is too large.
@@ -45,14 +45,14 @@
     "@nestjs/websockets": "^11.0.4",
     "@opentelemetry/api": "^1.9.0",
     "@opentelemetry/context-async-hooks": "^2.0.0",
-    "@opentelemetry/exporter-prometheus": "^0.207.0",
-    "@opentelemetry/instrumentation-http": "^0.207.0",
-    "@opentelemetry/instrumentation-ioredis": "^0.55.0",
-    "@opentelemetry/instrumentation-nestjs-core": "^0.54.0",
-    "@opentelemetry/instrumentation-pg": "^0.60.0",
+    "@opentelemetry/exporter-prometheus": "^0.208.0",
+    "@opentelemetry/instrumentation-http": "^0.208.0",
+    "@opentelemetry/instrumentation-ioredis": "^0.56.0",
+    "@opentelemetry/instrumentation-nestjs-core": "^0.55.0",
+    "@opentelemetry/instrumentation-pg": "^0.61.0",
     "@opentelemetry/resources": "^2.0.1",
     "@opentelemetry/sdk-metrics": "^2.0.1",
-    "@opentelemetry/sdk-node": "^0.207.0",
+    "@opentelemetry/sdk-node": "^0.208.0",
     "@opentelemetry/semantic-conventions": "^1.34.0",
     "@react-email/components": "^0.5.0",
     "@react-email/render": "^1.1.2",
@@ -305,7 +305,7 @@ export class StorageCore {
       return this.assetRepository.update({ id, encodedVideoPath: newPath });
     }
     case AssetPathType.Sidecar: {
-      return this.assetRepository.update({ id, sidecarPath: newPath });
+      return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: newPath });
     }
     case PersonPathType.Face: {
       return this.personRepository.update({ id, thumbnailPath: newPath });
@@ -122,7 +122,6 @@ export type Asset = {
   originalFileName: string;
   originalPath: string;
   ownerId: string;
-  sidecarPath: string | null;
   type: AssetType;
 };

@@ -156,13 +155,6 @@ export type StorageAsset = {
   encodedVideoPath: string | null;
 };

-export type SidecarWriteAsset = {
-  id: string;
-  sidecarPath: string | null;
-  originalPath: string;
-  tags: Array<{ value: string }>;
-};
-
 export type Stack = {
   id: string;
   primaryAssetId: string;
@@ -309,14 +301,14 @@ export type Workflow = Selectable<WorkflowTable> & {

 export type WorkflowFilter = Selectable<WorkflowFilterTable> & {
   workflowId: string;
-  filterId: string;
+  pluginFilterId: string;
   filterConfig: FilterConfig | null;
   order: number;
 };

 export type WorkflowAction = Selectable<WorkflowActionTable> & {
   workflowId: string;
-  actionId: string;
+  pluginActionId: string;
   actionConfig: ActionConfig | null;
   order: number;
 };
@@ -347,7 +339,6 @@ export const columns = {
     'asset.originalFileName',
     'asset.originalPath',
     'asset.ownerId',
-    'asset.sidecarPath',
     'asset.type',
   ],
   assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type'],
@@ -124,7 +124,6 @@ export type MapAsset = {
   originalPath: string;
   owner?: User | null;
   ownerId: string;
-  sidecarPath: string | null;
   stack?: Stack | null;
   stackId: string | null;
   tags?: Tag[];
@@ -7,7 +7,7 @@ import { Optional, ValidateBoolean, ValidateEnum } from 'src/validation';

 export class WorkflowFilterItemDto {
   @IsUUID()
-  filterId!: string;
+  pluginFilterId!: string;

   @IsObject()
   @Optional()
@@ -16,7 +16,7 @@ export class WorkflowFilterItemDto {

 export class WorkflowActionItemDto {
   @IsUUID()
-  actionId!: string;
+  pluginActionId!: string;

   @IsObject()
   @Optional()
@@ -86,7 +86,7 @@ export class WorkflowResponseDto {
 export class WorkflowFilterResponseDto {
   id!: string;
   workflowId!: string;
-  filterId!: string;
+  pluginFilterId!: string;
   filterConfig!: FilterConfig | null;
   order!: number;
 }
@@ -94,7 +94,7 @@ export class WorkflowFilterResponseDto {
 export class WorkflowActionResponseDto {
   id!: string;
   workflowId!: string;
-  actionId!: string;
+  pluginActionId!: string;
   actionConfig!: ActionConfig | null;
   order!: number;
 }
@@ -103,7 +103,7 @@ export function mapWorkflowFilter(filter: WorkflowFilter): WorkflowFilterResponseDto {
   return {
     id: filter.id,
     workflowId: filter.workflowId,
-    filterId: filter.filterId,
+    pluginFilterId: filter.pluginFilterId,
     filterConfig: filter.filterConfig,
     order: filter.order,
   };
@@ -113,7 +113,7 @@ export function mapWorkflowAction(action: WorkflowAction): WorkflowActionResponseDto {
   return {
     id: action.id,
     workflowId: action.workflowId,
-    actionId: action.actionId,
+    pluginActionId: action.pluginActionId,
     actionConfig: action.actionConfig,
     order: action.order,
   };
@@ -44,6 +44,7 @@ export enum AssetFileType {
   FullSize = 'fullsize',
   Preview = 'preview',
   Thumbnail = 'thumbnail',
+  Sidecar = 'sidecar',
 }

 export enum AlbumUserRole {
@@ -20,8 +20,23 @@ limit
 -- AssetJobRepository.getForSidecarWriteJob
 select
   "id",
-  "sidecarPath",
   "originalPath",
+  (
+    select
+      coalesce(json_agg(agg), '[]')
+    from
+      (
+        select
+          "asset_file"."id",
+          "asset_file"."path",
+          "asset_file"."type"
+        from
+          "asset_file"
+        where
+          "asset_file"."assetId" = "asset"."id"
+          and "asset_file"."type" = $1
+      ) as agg
+  ) as "files",
   (
     select
       coalesce(json_agg(agg), '[]')
@@ -39,21 +54,36 @@ select
 from
   "asset"
 where
-  "asset"."id" = $1::uuid
+  "asset"."id" = $2::uuid
 limit
-  $2
+  $3

 -- AssetJobRepository.getForSidecarCheckJob
 select
   "id",
-  "sidecarPath",
-  "originalPath"
+  "originalPath",
+  (
+    select
+      coalesce(json_agg(agg), '[]')
+    from
+      (
+        select
+          "asset_file"."id",
+          "asset_file"."path",
+          "asset_file"."type"
+        from
+          "asset_file"
+        where
+          "asset_file"."assetId" = "asset"."id"
+          and "asset_file"."type" = $1
+      ) as agg
+  ) as "files"
 from
   "asset"
 where
-  "asset"."id" = $1::uuid
+  "asset"."id" = $2::uuid
 limit
-  $2
+  $3

 -- AssetJobRepository.streamForThumbnailJob
 select
@@ -158,7 +188,6 @@ select
   "asset"."originalFileName",
   "asset"."originalPath",
   "asset"."ownerId",
-  "asset"."sidecarPath",
   "asset"."type",
   (
     select
@@ -173,11 +202,27 @@ select
       "asset_face"."assetId" = "asset"."id"
       and "asset_face"."deletedAt" is null
     ) as agg
-  ) as "faces"
+  ) as "faces",
+  (
+    select
+      coalesce(json_agg(agg), '[]')
+    from
+      (
+        select
+          "asset_file"."id",
+          "asset_file"."path",
+          "asset_file"."type"
+        from
+          "asset_file"
+        where
+          "asset_file"."assetId" = "asset"."id"
+          and "asset_file"."type" = $1
+      ) as agg
+  ) as "files"
 from
   "asset"
 where
-  "asset"."id" = $1
+  "asset"."id" = $2

 -- AssetJobRepository.getAlbumThumbnailFiles
 select
@@ -322,7 +367,6 @@ select
   "asset"."libraryId",
   "asset"."ownerId",
   "asset"."livePhotoVideoId",
-  "asset"."sidecarPath",
   "asset"."encodedVideoPath",
   "asset"."originalPath",
   to_json("asset_exif") as "exifInfo",
@@ -433,18 +477,33 @@ select
   "asset"."checksum",
   "asset"."originalPath",
   "asset"."isExternal",
-  "asset"."sidecarPath",
   "asset"."originalFileName",
   "asset"."livePhotoVideoId",
   "asset"."fileCreatedAt",
   "asset_exif"."timeZone",
-  "asset_exif"."fileSizeInByte"
+  "asset_exif"."fileSizeInByte",
+  (
+    select
+      coalesce(json_agg(agg), '[]')
+    from
+      (
+        select
+          "asset_file"."id",
+          "asset_file"."path",
+          "asset_file"."type"
+        from
+          "asset_file"
+        where
+          "asset_file"."assetId" = "asset"."id"
+          and "asset_file"."type" = $1
+      ) as agg
+  ) as "files"
 from
   "asset"
   inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
 where
   "asset"."deletedAt" is null
-  and "asset"."id" = $1
+  and "asset"."id" = $2

 -- AssetJobRepository.streamForStorageTemplateJob
 select
@@ -454,12 +513,27 @@ select
   "asset"."checksum",
   "asset"."originalPath",
   "asset"."isExternal",
-  "asset"."sidecarPath",
   "asset"."originalFileName",
   "asset"."livePhotoVideoId",
   "asset"."fileCreatedAt",
   "asset_exif"."timeZone",
-  "asset_exif"."fileSizeInByte"
+  "asset_exif"."fileSizeInByte",
+  (
+    select
+      coalesce(json_agg(agg), '[]')
+    from
+      (
+        select
+          "asset_file"."id",
+          "asset_file"."path",
+          "asset_file"."type"
+        from
+          "asset_file"
+        where
+          "asset_file"."assetId" = "asset"."id"
+          and "asset_file"."type" = $1
+      ) as agg
+  ) as "files"
 from
   "asset"
   inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
@@ -481,11 +555,15 @@ select
 from
   "asset"
 where
-  (
-    "asset"."sidecarPath" = $1
-    or "asset"."sidecarPath" is null
+  not exists (
+    select
+      "asset_file"."id"
+    from
+      "asset_file"
+    where
+      "asset_file"."assetId" = "asset"."id"
+      and "asset_file"."type" = $1
   )
   and "asset"."visibility" != $2

 -- AssetJobRepository.streamForDetectFacesJob
 select
@@ -216,6 +216,34 @@ from
 limit
   3

+-- AssetRepository.getForCopy
+select
+  "id",
+  "stackId",
+  "originalPath",
+  "isFavorite",
+  (
+    select
+      coalesce(json_agg(agg), '[]')
+    from
+      (
+        select
+          "asset_file"."id",
+          "asset_file"."path",
+          "asset_file"."type"
+        from
+          "asset_file"
+        where
+          "asset_file"."assetId" = "asset"."id"
+      ) as agg
+  ) as "files"
+from
+  "asset"
+where
+  "id" = $1::uuid
+limit
+  $2
+
 -- AssetRepository.getById
 select
   "asset".*
@@ -6,7 +6,6 @@ import { Asset, columns } from 'src/database';
 import { DummyValue, GenerateSql } from 'src/decorators';
 import { AssetFileType, AssetType, AssetVisibility } from 'src/enum';
 import { DB } from 'src/schema';
-import { StorageAsset } from 'src/types';
 import {
   anyUuid,
   asUuid,
@@ -40,7 +39,8 @@ export class AssetJobRepository {
     return this.db
       .selectFrom('asset')
       .where('asset.id', '=', asUuid(id))
-      .select(['id', 'sidecarPath', 'originalPath'])
+      .select(['id', 'originalPath'])
+      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
       .select((eb) =>
         jsonArrayFrom(
           eb
@@ -59,7 +59,8 @@ export class AssetJobRepository {
     return this.db
       .selectFrom('asset')
       .where('asset.id', '=', asUuid(id))
-      .select(['id', 'sidecarPath', 'originalPath'])
+      .select(['id', 'originalPath'])
+      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
       .limit(1)
       .executeTakeFirst();
   }
@@ -122,6 +123,7 @@ export class AssetJobRepository {
       .selectFrom('asset')
      .select(columns.asset)
      .select(withFaces)
+      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .where('asset.id', '=', id)
      .executeTakeFirst();
  }
@@ -228,7 +230,6 @@ export class AssetJobRepository {
        'asset.libraryId',
        'asset.ownerId',
        'asset.livePhotoVideoId',
-        'asset.sidecarPath',
        'asset.encodedVideoPath',
        'asset.originalPath',
      ])
@@ -306,26 +307,24 @@ export class AssetJobRepository {
        'asset.checksum',
        'asset.originalPath',
        'asset.isExternal',
-        'asset.sidecarPath',
        'asset.originalFileName',
        'asset.livePhotoVideoId',
        'asset.fileCreatedAt',
        'asset_exif.timeZone',
        'asset_exif.fileSizeInByte',
      ])
+      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .where('asset.deletedAt', 'is', null);
  }

  @GenerateSql({ params: [DummyValue.UUID] })
-  getForStorageTemplateJob(id: string): Promise<StorageAsset | undefined> {
-    return this.storageTemplateAssetQuery().where('asset.id', '=', id).executeTakeFirst() as Promise<
-      StorageAsset | undefined
-    >;
+  getForStorageTemplateJob(id: string) {
+    return this.storageTemplateAssetQuery().where('asset.id', '=', id).executeTakeFirst();
  }

  @GenerateSql({ params: [], stream: true })
  streamForStorageTemplateJob() {
-    return this.storageTemplateAssetQuery().stream() as AsyncIterableIterator<StorageAsset>;
+    return this.storageTemplateAssetQuery().stream();
  }

  @GenerateSql({ params: [DummyValue.DATE], stream: true })
@@ -343,9 +342,18 @@ export class AssetJobRepository {
      .selectFrom('asset')
      .select(['asset.id'])
      .$if(!force, (qb) =>
-        qb.where((eb) => eb.or([eb('asset.sidecarPath', '=', ''), eb('asset.sidecarPath', 'is', null)])),
+        qb.where((eb) =>
+          eb.not(
+            eb.exists(
+              eb
+                .selectFrom('asset_file')
+                .select('asset_file.id')
+                .whereRef('asset_file.assetId', '=', 'asset.id')
+                .where('asset_file.type', '=', AssetFileType.Sidecar),
+            ),
+          ),
+        ),
      )
      .where('asset.visibility', '!=', AssetVisibility.Hidden)
      .stream();
  }
@@ -396,6 +396,17 @@ export class AssetRepository {
     return this.db.selectFrom('asset_file').select(['assetId', 'path']).limit(sql.lit(3)).execute();
   }

+  @GenerateSql({ params: [DummyValue.UUID] })
+  getForCopy(id: string) {
+    return this.db
+      .selectFrom('asset')
+      .select(['id', 'stackId', 'originalPath', 'isFavorite'])
+      .select(withFiles)
+      .where('id', '=', asUuid(id))
+      .limit(1)
+      .executeTakeFirst();
+  }
+
   @GenerateSql({ params: [DummyValue.UUID] })
   getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
     return this.db
@@ -842,6 +853,10 @@ export class AssetRepository {
       .execute();
   }

+  async deleteFile({ assetId, type }: { assetId: string; type: AssetFileType }): Promise<void> {
+    await this.db.deleteFrom('asset_file').where('assetId', '=', asUuid(assetId)).where('type', '=', type).execute();
+  }
+
   async deleteFiles(files: Pick<Selectable<AssetFileTable>, 'id'>[]): Promise<void> {
     if (files.length === 0) {
       return;
@@ -403,7 +403,6 @@ export class DatabaseRepository {
       .set((eb) => ({
         originalPath: eb.fn('REGEXP_REPLACE', ['originalPath', source, target]),
         encodedVideoPath: eb.fn('REGEXP_REPLACE', ['encodedVideoPath', source, target]),
-        sidecarPath: eb.fn('REGEXP_REPLACE', ['sidecarPath', source, target]),
       }))
       .execute();
@@ -24,9 +24,8 @@ export class OAuthRepository {
   }

   async authorize(config: OAuthConfig, redirectUrl: string, state?: string, codeChallenge?: string) {
-    const { buildAuthorizationUrl, randomState, randomPKCECodeVerifier, calculatePKCECodeChallenge } = await import(
-      'openid-client'
-    );
+    const { buildAuthorizationUrl, randomState, randomPKCECodeVerifier, calculatePKCECodeChallenge } =
+      await import('openid-client');
     const client = await this.getClient(config);
     state ??= randomState();
New migration (24 lines):

@@ -0,0 +1,24 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`INSERT INTO "asset_file" ("assetId", "path", "type")
    SELECT
      id, "sidecarPath", 'sidecar'
    FROM "asset"
    WHERE "sidecarPath" IS NOT NULL AND "sidecarPath" != '';`.execute(db);

  await sql`ALTER TABLE "asset" DROP COLUMN "sidecarPath";`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset" ADD "sidecarPath" character varying;`.execute(db);

  await sql`
    UPDATE "asset"
    SET "sidecarPath" = "asset_file"."path"
    FROM "asset_file"
    WHERE "asset"."id" = "asset_file"."assetId" AND "asset_file"."type" = 'sidecar';
  `.execute(db);

  await sql`DELETE FROM "asset_file" WHERE "type" = 'sidecar';`.execute(db);
}
New migration (27 lines):

@@ -0,0 +1,27 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`DROP INDEX "workflow_filter_filterId_idx";`.execute(db);
  await sql`DROP INDEX "workflow_action_actionId_idx";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" DROP CONSTRAINT "workflow_filter_filterId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_action" DROP CONSTRAINT "workflow_action_actionId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" RENAME COLUMN "filterId" TO "pluginFilterId";`.execute(db);
  await sql`ALTER TABLE "workflow_action" RENAME COLUMN "actionId" TO "pluginActionId";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" ADD CONSTRAINT "workflow_filter_pluginFilterId_fkey" FOREIGN KEY ("pluginFilterId") REFERENCES "plugin_filter" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`ALTER TABLE "workflow_action" ADD CONSTRAINT "workflow_action_pluginActionId_fkey" FOREIGN KEY ("pluginActionId") REFERENCES "plugin_action" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`CREATE INDEX "workflow_filter_pluginFilterId_idx" ON "workflow_filter" ("pluginFilterId");`.execute(db);
  await sql`CREATE INDEX "workflow_action_pluginActionId_idx" ON "workflow_action" ("pluginActionId");`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP INDEX "workflow_filter_pluginFilterId_idx";`.execute(db);
  await sql`DROP INDEX "workflow_action_pluginActionId_idx";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" DROP CONSTRAINT "workflow_filter_pluginFilterId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_action" DROP CONSTRAINT "workflow_action_pluginActionId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" RENAME COLUMN "pluginFilterId" TO "filterId";`.execute(db);
  await sql`ALTER TABLE "workflow_action" RENAME COLUMN "pluginActionId" TO "actionId";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" ADD CONSTRAINT "workflow_filter_filterId_fkey" FOREIGN KEY ("filterId") REFERENCES "plugin_filter" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`ALTER TABLE "workflow_action" ADD CONSTRAINT "workflow_action_actionId_fkey" FOREIGN KEY ("actionId") REFERENCES "plugin_action" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`CREATE INDEX "workflow_filter_filterId_idx" ON "workflow_filter" ("filterId");`.execute(db);
  await sql`CREATE INDEX "workflow_action_actionId_idx" ON "workflow_action" ("actionId");`.execute(db);
}
@@ -105,9 +105,6 @@ export class AssetTable {
   @Column({ index: true })
   originalFileName!: string;

-  @Column({ nullable: true })
-  sidecarPath!: string | null;
-
   @Column({ type: 'bytea', nullable: true })
   thumbhash!: Buffer | null;
@@ -38,7 +38,7 @@ export class WorkflowTable {
 }

 @Index({ columns: ['workflowId', 'order'] })
-@Index({ columns: ['filterId'] })
+@Index({ columns: ['pluginFilterId'] })
 @Table('workflow_filter')
 export class WorkflowFilterTable {
   @PrimaryGeneratedColumn('uuid')
@@ -48,7 +48,7 @@ export class WorkflowFilterTable {
   workflowId!: Generated<string>;

   @ForeignKeyColumn(() => PluginFilterTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
-  filterId!: string;
+  pluginFilterId!: string;

   @Column({ type: 'jsonb', nullable: true })
   filterConfig!: FilterConfig | null;
@@ -58,7 +58,7 @@ export class WorkflowFilterTable {
 }

 @Index({ columns: ['workflowId', 'order'] })
-@Index({ columns: ['actionId'] })
+@Index({ columns: ['pluginActionId'] })
 @Table('workflow_action')
 export class WorkflowActionTable {
   @PrimaryGeneratedColumn('uuid')
@@ -68,7 +68,7 @@ export class WorkflowActionTable {
   workflowId!: Generated<string>;

   @ForeignKeyColumn(() => PluginActionTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
-  actionId!: string;
+  pluginActionId!: string;

   @Column({ type: 'jsonb', nullable: true })
   actionConfig!: ActionConfig | null;
@@ -174,7 +174,6 @@ const assetEntity = Object.freeze({
     longitude: 10.703_075,
   },
   livePhotoVideoId: null,
-  sidecarPath: null,
 } as MapAsset);

 const existingAsset = Object.freeze({
@@ -188,7 +187,6 @@ const existingAsset = Object.freeze({

 const sidecarAsset = Object.freeze({
   ...existingAsset,
-  sidecarPath: 'sidecar-path',
   checksum: Buffer.from('_getExistingAssetWithSideCar', 'utf8'),
 }) as MapAsset;

@@ -721,18 +719,22 @@ describe(AssetMediaService.name, () => {
       expect(mocks.asset.update).toHaveBeenCalledWith(
         expect.objectContaining({
           id: existingAsset.id,
-          sidecarPath: null,
           originalFileName: 'photo1.jpeg',
           originalPath: 'fake_path/photo1.jpeg',
         }),
       );
       expect(mocks.asset.create).toHaveBeenCalledWith(
         expect.objectContaining({
-          sidecarPath: null,
           originalFileName: 'existing-filename.jpeg',
           originalPath: 'fake_path/asset_1.jpeg',
         }),
       );
+      expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
+        expect.objectContaining({
+          assetId: existingAsset.id,
+          type: AssetFileType.Sidecar,
+        }),
+      );

       expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], {
         deletedAt: expect.any(Date),
@@ -769,6 +771,13 @@ describe(AssetMediaService.name, () => {
         deletedAt: expect.any(Date),
         status: AssetStatus.Trashed,
       });
+      expect(mocks.asset.upsertFile).toHaveBeenCalledWith(
+        expect.objectContaining({
+          assetId: existingAsset.id,
+          path: sidecarFile.originalPath,
+          type: AssetFileType.Sidecar,
+        }),
+      );
       expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
       expect(mocks.storage.utimes).toHaveBeenCalledWith(
         updatedFile.originalPath,
@@ -798,6 +807,12 @@ describe(AssetMediaService.name, () => {
         deletedAt: expect.any(Date),
         status: AssetStatus.Trashed,
       });
+      expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
+        expect.objectContaining({
+          assetId: existingAsset.id,
+          type: AssetFileType.Sidecar,
+        }),
+      );
       expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
       expect(mocks.storage.utimes).toHaveBeenCalledWith(
         updatedFile.originalPath,
@@ -827,6 +842,9 @@ describe(AssetMediaService.name, () => {

       expect(mocks.asset.create).not.toHaveBeenCalled();
       expect(mocks.asset.updateAll).not.toHaveBeenCalled();
+      expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
+      expect(mocks.asset.deleteFile).not.toHaveBeenCalled();

       expect(mocks.job.queue).toHaveBeenCalledWith({
         name: JobName.FileDelete,
         data: { files: [updatedFile.originalPath, undefined] },
@@ -21,7 +21,16 @@ import {
UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetStatus, AssetType, AssetVisibility, CacheControl, JobName, Permission, StorageFolder } from 'src/enum';
import {
AssetFileType,
AssetStatus,
AssetType,
AssetVisibility,
CacheControl,
JobName,
Permission,
StorageFolder,
} from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { BaseService } from 'src/services/base.service';
import { UploadFile, UploadRequest } from 'src/types';
@@ -354,9 +363,12 @@ export class AssetMediaService extends BaseService {
duration: dto.duration || null,

livePhotoVideoId: null,
sidecarPath: sidecarPath || null,
});

await (sidecarPath
? this.assetRepository.upsertFile({ assetId, type: AssetFileType.Sidecar, path: sidecarPath })
: this.assetRepository.deleteFile({ assetId, type: AssetFileType.Sidecar }));

await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size });
await this.jobRepository.queue({
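For readers skimming the hunk above: the old single-column write (sidecarPath: sidecarPath || null) is replaced by a row in the asset-files relation. A minimal stand-alone sketch of that upsert-or-delete pattern, where SidecarRepo and syncSidecarFile are illustrative names rather than Immich's real API:

import { AssetFileType } from 'src/enum';

// Illustrative repository surface; only the two calls used above.
type SidecarRepo = {
  upsertFile(file: { assetId: string; type: AssetFileType; path: string }): Promise<void>;
  deleteFile(file: { assetId: string; type: AssetFileType }): Promise<void>;
};

// A present path becomes an upsert; a null path removes any stale row,
// mirroring the ternary in the service code above.
const syncSidecarFile = async (repo: SidecarRepo, assetId: string, sidecarPath: string | null) =>
  sidecarPath
    ? repo.upsertFile({ assetId, type: AssetFileType.Sidecar, path: sidecarPath })
    : repo.deleteFile({ assetId, type: AssetFileType.Sidecar });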
@@ -384,7 +396,6 @@ export class AssetMediaService extends BaseService {
localDateTime: asset.localDateTime,
fileModifiedAt: asset.fileModifiedAt,
livePhotoVideoId: asset.livePhotoVideoId,
sidecarPath: asset.sidecarPath,
});

const { size } = await this.storageRepository.stat(created.originalPath);
@@ -414,7 +425,6 @@ export class AssetMediaService extends BaseService {
visibility: dto.visibility ?? AssetVisibility.Timeline,
livePhotoVideoId: dto.livePhotoVideoId,
originalFileName: dto.filename || file.originalName,
sidecarPath: sidecarFile?.originalPath,
});

if (dto.metadata) {
@@ -422,6 +432,11 @@ export class AssetMediaService extends BaseService {
}

if (sidecarFile) {
await this.assetRepository.upsertFile({
assetId: asset.id,
path: sidecarFile.originalPath,
type: AssetFileType.Sidecar,
});
await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
}
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));

@@ -585,8 +585,8 @@ describe(AssetService.name, () => {
'/uploads/user-id/webp/path.ext',
'/uploads/user-id/thumbs/path.jpg',
'/uploads/user-id/fullsize/path.webp',
assetWithFace.encodedVideoPath,
assetWithFace.sidecarPath,
assetWithFace.encodedVideoPath, // this value is null
undefined, // no sidecar path
assetWithFace.originalPath,
],
},
@@ -2,6 +2,7 @@ import { BadRequestException, Injectable } from '@nestjs/common';
import _ from 'lodash';
import { DateTime, Duration } from 'luxon';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { AssetFile } from 'src/database';
import { OnJob } from 'src/decorators';
import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import {
@@ -18,7 +19,16 @@ import {
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { AssetMetadataKey, AssetStatus, AssetVisibility, JobName, JobStatus, Permission, QueueName } from 'src/enum';
import {
AssetFileType,
AssetMetadataKey,
AssetStatus,
AssetVisibility,
JobName,
JobStatus,
Permission,
QueueName,
} from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
import { requireElevatedPermission } from 'src/utils/access';
@@ -197,8 +207,8 @@ export class AssetService extends BaseService {
}: AssetCopyDto,
) {
await this.requireAccess({ auth, permission: Permission.AssetCopy, ids: [sourceId, targetId] });
const sourceAsset = await this.assetRepository.getById(sourceId);
const targetAsset = await this.assetRepository.getById(targetId);
const sourceAsset = await this.assetRepository.getForCopy(sourceId);
const targetAsset = await this.assetRepository.getForCopy(targetId);

if (!sourceAsset || !targetAsset) {
throw new BadRequestException('Both assets must exist');
@@ -252,19 +262,25 @@ export class AssetService extends BaseService {
sourceAsset,
targetAsset,
}: {
sourceAsset: { sidecarPath: string | null };
targetAsset: { id: string; sidecarPath: string | null; originalPath: string };
sourceAsset: { files: AssetFile[] };
targetAsset: { id: string; files: AssetFile[]; originalPath: string };
}) {
if (!sourceAsset.sidecarPath) {
const { sidecarFile: sourceFile } = getAssetFiles(sourceAsset.files);
if (!sourceFile?.path) {
return;
}

if (targetAsset.sidecarPath) {
await this.storageRepository.unlink(targetAsset.sidecarPath);
const { sidecarFile: targetFile } = getAssetFiles(targetAsset.files ?? []);
if (targetFile?.path) {
await this.storageRepository.unlink(targetFile.path);
}

await this.storageRepository.copyFile(sourceAsset.sidecarPath, `${targetAsset.originalPath}.xmp`);
await this.assetRepository.update({ id: targetAsset.id, sidecarPath: `${targetAsset.originalPath}.xmp` });
await this.storageRepository.copyFile(sourceFile.path, `${targetAsset.originalPath}.xmp`);
await this.assetRepository.upsertFile({
assetId: targetAsset.id,
path: `${targetAsset.originalPath}.xmp`,
type: AssetFileType.Sidecar,
});
await this.jobRepository.queue({ name: JobName.AssetExtractMetadata, data: { id: targetAsset.id } });
}

@@ -344,11 +360,11 @@ export class AssetService extends BaseService {
}
}

const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(asset.files ?? []);
const { fullsizeFile, previewFile, thumbnailFile, sidecarFile } = getAssetFiles(asset.files ?? []);
const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];

if (deleteOnDisk) {
files.push(asset.sidecarPath, asset.originalPath);
files.push(sidecarFile?.path, asset.originalPath);
}

await this.jobRepository.queue({ name: JobName.FileDelete, data: { files } });
@@ -4,7 +4,16 @@ import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { defaults } from 'src/config';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetType, AssetVisibility, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
import {
AssetFileType,
AssetType,
AssetVisibility,
ExifOrientation,
ImmichWorker,
JobName,
JobStatus,
SourceType,
} from 'src/enum';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
import { assetStub } from 'test/fixtures/asset.stub';
@@ -15,17 +24,24 @@ import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';

const removeNonSidecarFiles = (asset: any) => {
return {
...asset,
files: asset.files.filter((file: any) => file.type === AssetFileType.Sidecar),
};
};

const forSidecarJob = (
asset: {
id?: string;
originalPath?: string;
sidecarPath?: string | null;
files?: { id: string; type: AssetFileType; path: string }[];
} = {},
) => {
return {
id: factory.uuid(),
originalPath: '/path/to/IMG_123.jpg',
sidecarPath: null,
files: [],
...asset,
};
};
@@ -166,7 +182,7 @@ describe(MetadataService.name, () => {
it('should handle a date in a sidecar file', async () => {
const originalDate = new Date('2023-11-21T16:13:17.517Z');
const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
mockReadTags({ CreationDate: originalDate.toISOString() }, { CreationDate: sidecarDate.toISOString() });

await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -185,7 +201,7 @@ describe(MetadataService.name, () => {
it('should take the file modification date when missing exif and earlier than creation date', async () => {
const fileCreatedAt = new Date('2022-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2021-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -211,7 +227,7 @@ describe(MetadataService.name, () => {
it('should take the file creation date when missing exif and earlier than modification date', async () => {
const fileCreatedAt = new Date('2021-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -234,7 +250,7 @@ describe(MetadataService.name, () => {

it('should determine dateTimeOriginal regardless of the server time zone', async () => {
process.env.TZ = 'America/Los_Angeles';
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });

await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -252,7 +268,7 @@ describe(MetadataService.name, () => {
});

it('should handle lists of numbers', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.image.fileModifiedAt,
@@ -305,7 +321,7 @@ describe(MetadataService.name, () => {
});

it('should apply reverse geocoding', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: true } });
mocks.map.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
mocks.storage.stat.mockResolvedValue({
@@ -334,7 +350,7 @@ describe(MetadataService.name, () => {
});

it('should discard latitude and longitude on null island', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
mockReadTags({
GPSLatitude: 0,
GPSLongitude: 0,
@@ -346,7 +362,7 @@ describe(MetadataService.name, () => {
});

it('should extract tags from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ TagsList: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -356,7 +372,7 @@ describe(MetadataService.name, () => {
});

it('should extract hierarchy from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -376,7 +392,7 @@ describe(MetadataService.name, () => {
});

it('should extract tags from Keywords as a string', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: 'Parent' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -386,7 +402,7 @@ describe(MetadataService.name, () => {
});

it('should extract tags from Keywords as a list', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -396,7 +412,7 @@ describe(MetadataService.name, () => {
});

it('should extract tags from Keywords as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -407,7 +423,7 @@ describe(MetadataService.name, () => {
});

it('should extract hierarchal tags from Keywords', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: 'Parent/Child' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -426,7 +442,7 @@ describe(MetadataService.name, () => {
});

it('should ignore Keywords when TagsList is present', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -445,7 +461,7 @@ describe(MetadataService.name, () => {
});

it('should extract hierarchy from HierarchicalSubject', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -466,7 +482,7 @@ describe(MetadataService.name, () => {
});

it('should extract tags from HierarchicalSubject as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -1030,8 +1046,15 @@ describe(MetadataService.name, () => {
it('should prefer Duration from exif over sidecar', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
...assetStub.image,
sidecarPath: '/path/to/something',
files: [
{
id: 'some-id',
type: AssetFileType.Sidecar,
path: '/path/to/something',
},
],
});

mockReadTags({ Duration: 123 }, { Duration: 456 });

await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -1536,18 +1559,25 @@ describe(MetadataService.name, () => {
});

it('should detect a new sidecar at .jpg.xmp', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', files: [] });

mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);

await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);

expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: `/path/to/IMG_123.jpg.xmp` });
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: asset.id,
type: AssetFileType.Sidecar,
path: '/path/to/IMG_123.jpg.xmp',
});
});

it('should detect a new sidecar at .xmp', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [],
});

mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
@@ -1555,33 +1585,44 @@ describe(MetadataService.name, () => {

await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);

expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: '/path/to/IMG_123.xmp' });
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: asset.id,
type: AssetFileType.Sidecar,
path: '/path/to/IMG_123.xmp',
});
});
it('should unset sidecar path if file does not exist anymore', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg.xmp' });
it('should unset sidecar path if file no longer exists', async () => {
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
});
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValue(false);

await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);

expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: null });
expect(mocks.asset.deleteFile).toHaveBeenCalledWith({ assetId: asset.id, type: AssetFileType.Sidecar });
});

it('should do nothing if the sidecar file still exists', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg' });
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
});

mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);

await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Skipped);

expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.asset.deleteFile).not.toHaveBeenCalled();
});
});

describe('handleSidecarWrite', () => {
it('should skip assets that do not exist anymore', async () => {
it('should skip assets that no longer exist', async () => {
mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(void 0);
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.Failed);
expect(mocks.metadata.writeTags).not.toHaveBeenCalled();
@@ -1610,7 +1651,7 @@ describe(MetadataService.name, () => {
dateTimeOriginal: date,
}),
).resolves.toBe(JobStatus.Success);
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.sidecarPath, {
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.files[0].path, {
Description: description,
ImageDescription: description,
DateTimeOriginal: date,
@@ -8,9 +8,10 @@ import { constants } from 'node:fs/promises';
import { join, parse } from 'node:path';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { Asset, AssetFace } from 'src/database';
import { Asset, AssetFace, AssetFile } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import {
AssetFileType,
AssetType,
AssetVisibility,
DatabaseLock,
@@ -29,6 +30,7 @@ import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
import { PersonTable } from 'src/schema/tables/person.table';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { getAssetFiles } from 'src/utils/asset.util';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { isFaceImportEnabled } from 'src/utils/misc';
import { upsertTags } from 'src/utils/tag';
@@ -359,17 +361,21 @@ export class MetadataService extends BaseService {
break;
}

const isChanged = sidecarPath !== asset.sidecarPath;
const { sidecarFile } = getAssetFiles(asset.files);

const isChanged = sidecarPath !== sidecarFile?.path;

this.logger.debug(
`Sidecar check found old=${asset.sidecarPath}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
`Sidecar check found old=${sidecarFile?.path}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
);

if (!isChanged) {
return JobStatus.Skipped;
}

await this.assetRepository.update({ id: asset.id, sidecarPath });
await (sidecarPath === null
? this.assetRepository.deleteFile({ assetId: asset.id, type: AssetFileType.Sidecar })
: this.assetRepository.upsertFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: sidecarPath }));

return JobStatus.Success;
}
@@ -394,7 +400,9 @@ export class MetadataService extends BaseService {

const tagsList = (asset.tags || []).map((tag) => tag.value);

const sidecarPath = asset.sidecarPath || `${asset.originalPath}.xmp`;
const { sidecarFile } = getAssetFiles(asset.files);
const sidecarPath = sidecarFile?.path || `${asset.originalPath}.xmp`;

const exif = _.omitBy(
<Tags>{
Description: description,
@@ -414,18 +422,19 @@ export class MetadataService extends BaseService {

await this.metadataRepository.writeTags(sidecarPath, exif);

if (!asset.sidecarPath) {
await this.assetRepository.update({ id, sidecarPath });
if (asset.files.length === 0) {
await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: sidecarPath });
}

return JobStatus.Success;
}

private getSidecarCandidates({ sidecarPath, originalPath }: { sidecarPath: string | null; originalPath: string }) {
private getSidecarCandidates({ files, originalPath }: { files: AssetFile[]; originalPath: string }) {
const candidates: string[] = [];

if (sidecarPath) {
candidates.push(sidecarPath);
const { sidecarFile } = getAssetFiles(files);
if (sidecarFile?.path) {
candidates.push(sidecarFile.path);
}

const assetPath = parse(originalPath);
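The candidate order used by getSidecarCandidates can be sketched in isolation like this (a simplified stand-alone version; the real method reads the files relation shown above, and the two fallback shapes match the .jpg.xmp and .xmp tests earlier in the spec):

import { join, parse } from 'node:path';

// Simplified sketch: the known DB entry first, then `IMG_123.jpg.xmp`,
// then `IMG_123.xmp`, for an original at `/path/to/IMG_123.jpg`.
const sidecarCandidates = (originalPath: string, knownSidecarPath?: string): string[] => {
  const candidates: string[] = [];
  if (knownSidecarPath) {
    candidates.push(knownSidecarPath);
  }
  const { dir, name, base } = parse(originalPath);
  candidates.push(join(dir, `${base}.xmp`), join(dir, `${name}.xmp`));
  return candidates;
};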
@@ -456,14 +465,12 @@ export class MetadataService extends BaseService {
return { width, height };
}

private async getExifTags(asset: {
originalPath: string;
sidecarPath: string | null;
type: AssetType;
}): Promise<ImmichTags> {
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
const { sidecarFile } = getAssetFiles(asset.files);

const [mediaTags, sidecarTags, videoTags] = await Promise.all([
this.metadataRepository.readTags(asset.originalPath),
asset.sidecarPath ? this.metadataRepository.readTags(asset.sidecarPath) : null,
sidecarFile ? this.metadataRepository.readTags(sidecarFile.path) : null,
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
]);

@@ -247,9 +247,9 @@ export class PluginService extends BaseService {

private async executeFilters(workflowFilters: WorkflowFilter[], context: WorkflowContext): Promise<boolean> {
for (const workflowFilter of workflowFilters) {
const filter = await this.pluginRepository.getFilter(workflowFilter.filterId);
const filter = await this.pluginRepository.getFilter(workflowFilter.pluginFilterId);
if (!filter) {
this.logger.error(`Filter ${workflowFilter.filterId} not found`);
this.logger.error(`Filter ${workflowFilter.pluginFilterId} not found`);
return false;
}

@@ -291,9 +291,9 @@ export class PluginService extends BaseService {

private async executeActions(workflowActions: WorkflowAction[], context: WorkflowContext): Promise<void> {
for (const workflowAction of workflowActions) {
const action = await this.pluginRepository.getAction(workflowAction.actionId);
const action = await this.pluginRepository.getAction(workflowAction.pluginActionId);
if (!action) {
throw new Error(`Action ${workflowAction.actionId} not found`);
throw new Error(`Action ${workflowAction.pluginActionId} not found`);
}

const pluginInstance = this.loadedPlugins.get(action.pluginId);
@@ -6,10 +6,20 @@ import sanitize from 'sanitize-filename';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { SystemConfigTemplateStorageOptionDto } from 'src/dtos/system-config.dto';
import { AssetPathType, AssetType, DatabaseLock, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import {
AssetFileType,
AssetPathType,
AssetType,
DatabaseLock,
JobName,
JobStatus,
QueueName,
StorageFolder,
} from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobOf, StorageAsset } from 'src/types';
import { getAssetFile } from 'src/utils/asset.util';
import { getLivePhotoMotionFilename } from 'src/utils/file';

const storageTokens = {
@@ -196,7 +206,7 @@ export class StorageTemplateService extends BaseService {
}

return this.databaseRepository.withLock(DatabaseLock.StorageTemplateMigration, async () => {
const { id, sidecarPath, originalPath, checksum, fileSizeInByte } = asset;
const { id, originalPath, checksum, fileSizeInByte } = asset;
const oldPath = originalPath;
const newPath = await this.getTemplatePath(asset, metadata);

@@ -213,6 +223,8 @@ export class StorageTemplateService extends BaseService {
newPath,
assetInfo: { sizeInBytes: fileSizeInByte, checksum },
});

const sidecarPath = getAssetFile(asset.files, AssetFileType.Sidecar)?.path;
if (sidecarPath) {
await this.storageCore.moveFile({
entityId: id,
@@ -78,13 +78,13 @@ export class WorkflowService extends BaseService {
}

private async validateAndMapFilters(
filters: Array<{ filterId: string; filterConfig?: any }>,
filters: Array<{ pluginFilterId: string; filterConfig?: any }>,
requiredContext: PluginContext,
) {
for (const dto of filters) {
const filter = await this.pluginRepository.getFilter(dto.filterId);
const filter = await this.pluginRepository.getFilter(dto.pluginFilterId);
if (!filter) {
throw new BadRequestException(`Invalid filter ID: ${dto.filterId}`);
throw new BadRequestException(`Invalid filter ID: ${dto.pluginFilterId}`);
}

if (!filter.supportedContexts.includes(requiredContext)) {
@@ -95,20 +95,20 @@ export class WorkflowService extends BaseService {
}

return filters.map((dto, index) => ({
filterId: dto.filterId,
pluginFilterId: dto.pluginFilterId,
filterConfig: dto.filterConfig || null,
order: index,
}));
}

private async validateAndMapActions(
actions: Array<{ actionId: string; actionConfig?: any }>,
actions: Array<{ pluginActionId: string; actionConfig?: any }>,
requiredContext: PluginContext,
) {
for (const dto of actions) {
const action = await this.pluginRepository.getAction(dto.actionId);
const action = await this.pluginRepository.getAction(dto.pluginActionId);
if (!action) {
throw new BadRequestException(`Invalid action ID: ${dto.actionId}`);
throw new BadRequestException(`Invalid action ID: ${dto.pluginActionId}`);
}
if (!action.supportedContexts.includes(requiredContext)) {
throw new BadRequestException(
@@ -118,7 +118,7 @@ export class WorkflowService extends BaseService {
}

return actions.map((dto, index) => ({
actionId: dto.actionId,
pluginActionId: dto.pluginActionId,
actionConfig: dto.actionConfig || null,
order: index,
}));
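The net effect of the rename for API callers, as a hypothetical create-workflow payload (the names, IDs, and config keys here are placeholders for illustration, not real plugin identifiers):

// Hypothetical payload after the filterId/actionId to pluginFilterId/pluginActionId rename.
const createWorkflowDto = {
  name: 'archive-large-screenshots',
  description: 'Example only',
  enabled: true,
  filters: [{ pluginFilterId: 'plugin-filter-uuid', filterConfig: { minWidth: 1080 } }],
  actions: [{ pluginActionId: 'plugin-action-uuid', actionConfig: { archive: true } }],
};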
@@ -1,6 +1,6 @@
import { SystemConfig } from 'src/config';
import { VECTOR_EXTENSIONS } from 'src/constants';
import { Asset } from 'src/database';
import { Asset, AssetFile } from 'src/database';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
@@ -475,8 +475,8 @@ export type StorageAsset = {
fileCreatedAt: Date;
originalPath: string;
originalFileName: string;
sidecarPath: string | null;
fileSizeInByte: number | null;
files: AssetFile[];
};

export type OnThisDayData = { year: number };

@@ -21,6 +21,7 @@ export const getAssetFiles = (files: AssetFile[]) => ({
fullsizeFile: getAssetFile(files, AssetFileType.FullSize),
previewFile: getAssetFile(files, AssetFileType.Preview),
thumbnailFile: getAssetFile(files, AssetFileType.Thumbnail),
sidecarFile: getAssetFile(files, AssetFileType.Sidecar),
});

export const addAssets = async (
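A quick usage sketch of the extended helper; the file objects are trimmed to the fields that matter here, so treat the shapes as illustrative:

import { AssetFileType } from 'src/enum';
import { getAssetFiles } from 'src/utils/asset.util';

// With the new `sidecarFile` entry, callers destructure every derived file
// in one pass instead of re-filtering `asset.files` per type.
const { previewFile, sidecarFile } = getAssetFiles([
  { id: 'p', type: AssetFileType.Preview, path: '/uploads/user-id/preview/path.webp' },
  { id: 's', type: AssetFileType.Sidecar, path: '/original/path.jpg.xmp' },
] as any);

console.log(previewFile?.path); // '/uploads/user-id/preview/path.webp'
console.log(sidecarFile?.path); // '/original/path.jpg.xmp'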
43
server/test/fixtures/asset.stub.ts
vendored
@@ -24,6 +24,18 @@ const fullsizeFile: AssetFile = {
path: '/uploads/user-id/fullsize/path.webp',
};

const sidecarFileWithExt: AssetFile = {
id: 'sidecar-with-ext',
type: AssetFileType.Sidecar,
path: '/original/path.ext.xmp',
};

const sidecarFileWithoutExt: AssetFile = {
id: 'sidecar-without-ext',
type: AssetFileType.Sidecar,
path: '/original/path.xmp',
};

const files: AssetFile[] = [fullsizeFile, previewFile, thumbnailFile];

export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif })[]) => {
@@ -51,8 +63,8 @@ export const assetStub = {
fileCreatedAt: new Date('2022-06-19T23:41:36.910Z'),
originalPath: '/original/path.jpg',
originalFileName: 'IMG_123.jpg',
sidecarPath: null,
fileSizeInByte: 12_345,
files: [],
...asset,
}),
noResizePath: Object.freeze({
@@ -81,7 +93,6 @@ export const assetStub = {
sharedLinks: [],
faces: [],
exifInfo: {} as Exif,
sidecarPath: null,
deletedAt: null,
isExternal: false,
duplicateId: null,
@@ -117,7 +128,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'IMG_456.jpg',
faces: [],
sidecarPath: null,
isExternal: false,
exifInfo: {
fileSizeInByte: 123_000,
@@ -157,7 +167,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: null,
deletedAt: null,
duplicateId: null,
isOffline: false,
@@ -194,7 +203,6 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 1000,
@@ -243,7 +251,6 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -285,7 +292,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -328,7 +334,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -367,7 +372,6 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -409,7 +413,6 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -448,7 +451,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -490,7 +492,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -526,7 +527,6 @@ export const assetStub = {
livePhotoVideoId: null,
sharedLinks: [],
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 100_000,
exifImageHeight: 2160,
@@ -553,6 +553,7 @@ export const assetStub = {
fileSizeInByte: 100_000,
timeZone: `America/New_York`,
},
files: [] as AssetFile[],
libraryId: null,
visibility: AssetVisibility.Hidden,
} as MapAsset & { faces: AssetFace[]; files: AssetFile[]; exifInfo: Exif }),
@@ -573,7 +574,7 @@ export const assetStub = {
files,
faces: [] as AssetFace[],
visibility: AssetVisibility.Timeline,
} as MapAsset & { faces: AssetFace[] }),
} as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),

livePhotoWithOriginalFileName: Object.freeze({
id: 'live-photo-still-asset',
@@ -589,10 +590,11 @@ export const assetStub = {
fileSizeInByte: 25_000,
timeZone: `America/New_York`,
},
files: [] as AssetFile[],
libraryId: null,
faces: [] as AssetFace[],
visibility: AssetVisibility.Timeline,
} as MapAsset & { faces: AssetFace[] }),
} as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),

withLocation: Object.freeze({
id: 'asset-with-favorite-id',
@@ -605,7 +607,6 @@ export const assetStub = {
deviceId: 'device-id',
checksum: Buffer.from('file hash', 'utf8'),
originalPath: '/original/path.ext',
sidecarPath: null,
type: AssetType.Image,
files: [previewFile],
thumbhash: null,
@@ -652,7 +653,7 @@ export const assetStub = {
thumbhash: null,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files: [previewFile],
files: [previewFile, sidecarFileWithExt],
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
@@ -665,7 +666,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: '/original/path.ext.xmp',
deletedAt: null,
duplicateId: null,
isOffline: false,
@@ -688,7 +688,7 @@ export const assetStub = {
thumbhash: null,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files: [previewFile],
files: [previewFile, sidecarFileWithoutExt],
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
@@ -701,7 +701,6 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: '/original/path.xmp',
deletedAt: null,
duplicateId: null,
isOffline: false,
@@ -734,7 +733,6 @@ export const assetStub = {
livePhotoVideoId: null,
sharedLinks: [],
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 100_000,
} as Exif,
@@ -776,7 +774,6 @@ export const assetStub = {
originalFileName: 'photo.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -812,7 +809,6 @@ export const assetStub = {
originalFileName: 'asset-id.dng',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
@@ -853,7 +849,6 @@ export const assetStub = {
originalFileName: 'asset-id.hif',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
@@ -1,5 +1,5 @@
import { Kysely } from 'kysely';
import { JobName, SharedLinkType } from 'src/enum';
import { AssetFileType, JobName, SharedLinkType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
@@ -184,7 +184,15 @@ describe(AssetService.name, () => {
jobRepo.queue.mockResolvedValue();

const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id, sidecarPath: '/path/to/my/sidecar.xmp' });

const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });

await ctx.newAssetFile({
assetId: oldAsset.id,
path: '/path/to/my/sidecar.xmp',
type: AssetFileType.Sidecar,
});

const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });

await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });

@@ -82,7 +82,11 @@ describe(MetadataService.name, () => {
process.env.TZ = serverTimeZone ?? undefined;

const { filePath } = await createTestFile(exifData);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ id: 'asset-1', originalPath: filePath } as any);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
id: 'asset-1',
originalPath: filePath,
files: [],
} as any);

await sut.handleMetadataExtraction({ id: 'asset-1' });
@@ -113,13 +113,13 @@ describe(WorkflowService.name, () => {
enabled: true,
filters: [
{
filterId: testFilterId,
pluginFilterId: testFilterId,
filterConfig: { key: 'value' },
},
],
actions: [
{
actionId: testActionId,
pluginActionId: testActionId,
actionConfig: { action: 'test' },
},
],
@@ -137,7 +137,7 @@ describe(WorkflowService.name, () => {
expect(workflow.filters[0]).toMatchObject({
id: expect.any(String),
workflowId: workflow.id,
filterId: testFilterId,
pluginFilterId: testFilterId,
filterConfig: { key: 'value' },
order: 0,
});
@@ -146,7 +146,7 @@ describe(WorkflowService.name, () => {
expect(workflow.actions[0]).toMatchObject({
id: expect.any(String),
workflowId: workflow.id,
actionId: testActionId,
pluginActionId: testActionId,
actionConfig: { action: 'test' },
order: 0,
});
@@ -163,7 +163,7 @@ describe(WorkflowService.name, () => {
name: 'invalid-workflow',
description: 'A workflow with invalid filter',
enabled: true,
filters: [{ filterId: factory.uuid(), filterConfig: { key: 'value' } }],
filters: [{ pluginFilterId: factory.uuid(), filterConfig: { key: 'value' } }],
actions: [],
}),
).rejects.toThrow('Invalid filter ID');
@@ -181,7 +181,7 @@ describe(WorkflowService.name, () => {
description: 'A workflow with invalid action',
enabled: true,
filters: [],
actions: [{ actionId: factory.uuid(), actionConfig: { action: 'test' } }],
actions: [{ pluginActionId: factory.uuid(), actionConfig: { action: 'test' } }],
}),
).rejects.toThrow('Invalid action ID');
});
@@ -220,7 +220,7 @@ describe(WorkflowService.name, () => {
name: 'invalid-context-workflow',
description: 'A workflow with context mismatch',
enabled: true,
filters: [{ filterId: result.filters[0].id }],
filters: [{ pluginFilterId: result.filters[0].id }],
actions: [],
}),
).rejects.toThrow('does not support asset context');
@@ -261,7 +261,7 @@ describe(WorkflowService.name, () => {
description: 'A workflow with context mismatch',
enabled: true,
filters: [],
actions: [{ actionId: result.actions[0].id }],
actions: [{ pluginActionId: result.actions[0].id }],
}),
).rejects.toThrow('does not support asset context');
});
@@ -277,13 +277,13 @@ describe(WorkflowService.name, () => {
description: 'A workflow with multiple filters and actions',
enabled: true,
filters: [
{ filterId: testFilterId, filterConfig: { step: 1 } },
{ filterId: testFilterId, filterConfig: { step: 2 } },
{ pluginFilterId: testFilterId, filterConfig: { step: 1 } },
{ pluginFilterId: testFilterId, filterConfig: { step: 2 } },
],
actions: [
{ actionId: testActionId, actionConfig: { step: 1 } },
{ actionId: testActionId, actionConfig: { step: 2 } },
{ actionId: testActionId, actionConfig: { step: 3 } },
{ pluginActionId: testActionId, actionConfig: { step: 1 } },
{ pluginActionId: testActionId, actionConfig: { step: 2 } },
{ pluginActionId: testActionId, actionConfig: { step: 3 } },
],
});

@@ -378,8 +378,8 @@ describe(WorkflowService.name, () => {
name: 'test-workflow',
description: 'A test workflow',
enabled: true,
filters: [{ filterId: testFilterId, filterConfig: { key: 'value' } }],
actions: [{ actionId: testActionId, actionConfig: { action: 'test' } }],
filters: [{ pluginFilterId: testFilterId, filterConfig: { key: 'value' } }],
actions: [{ pluginActionId: testActionId, actionConfig: { action: 'test' } }],
});

const workflow = await sut.get(auth, created.id);
@@ -461,14 +461,14 @@ describe(WorkflowService.name, () => {
name: 'test-workflow',
description: 'Test',
enabled: true,
filters: [{ filterId: testFilterId, filterConfig: { old: 'config' } }],
filters: [{ pluginFilterId: testFilterId, filterConfig: { old: 'config' } }],
actions: [],
});

const updated = await sut.update(auth, created.id, {
filters: [
{ filterId: testFilterId, filterConfig: { new: 'config' } },
{ filterId: testFilterId, filterConfig: { second: 'filter' } },
{ pluginFilterId: testFilterId, filterConfig: { new: 'config' } },
{ pluginFilterId: testFilterId, filterConfig: { second: 'filter' } },
],
});

@@ -488,13 +488,13 @@ describe(WorkflowService.name, () => {
description: 'Test',
enabled: true,
filters: [],
actions: [{ actionId: testActionId, actionConfig: { old: 'config' } }],
actions: [{ pluginActionId: testActionId, actionConfig: { old: 'config' } }],
});

const updated = await sut.update(auth, created.id, {
actions: [
{ actionId: testActionId, actionConfig: { new: 'config' } },
{ actionId: testActionId, actionConfig: { second: 'action' } },
{ pluginActionId: testActionId, actionConfig: { new: 'config' } },
{ pluginActionId: testActionId, actionConfig: { second: 'action' } },
],
});

@@ -513,7 +513,7 @@ describe(WorkflowService.name, () => {
name: 'test-workflow',
description: 'Test',
enabled: true,
filters: [{ filterId: testFilterId, filterConfig: { key: 'value' } }],
filters: [{ pluginFilterId: testFilterId, filterConfig: { key: 'value' } }],
actions: [],
});

@@ -588,7 +588,7 @@ describe(WorkflowService.name, () => {

await expect(
sut.update(auth, created.id, {
filters: [{ filterId: factory.uuid(), filterConfig: {} }],
filters: [{ pluginFilterId: factory.uuid(), filterConfig: {} }],
}),
).rejects.toThrow();
});
@@ -608,7 +608,7 @@ describe(WorkflowService.name, () => {
});

await expect(
sut.update(auth, created.id, { actions: [{ actionId: factory.uuid(), actionConfig: {} }] }),
sut.update(auth, created.id, { actions: [{ pluginActionId: factory.uuid(), actionConfig: {} }] }),
).rejects.toThrow();
});
});
@@ -643,8 +643,8 @@ describe(WorkflowService.name, () => {
name: 'test-workflow',
description: 'Test',
enabled: true,
filters: [{ filterId: testFilterId, filterConfig: {} }],
actions: [{ actionId: testActionId, actionConfig: {} }],
filters: [{ pluginFilterId: testFilterId, filterConfig: {} }],
actions: [{ pluginActionId: testActionId, actionConfig: {} }],
});

await sut.delete(auth, workflow.id);
@@ -10,6 +10,7 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
updateAllExif: vitest.fn(),
updateDateTimeOriginal: vitest.fn().mockResolvedValue([]),
upsertJobStatus: vitest.fn(),
getForCopy: vitest.fn(),
getByDayOfYear: vitest.fn(),
getByIds: vitest.fn().mockResolvedValue([]),
getByIdsWithAllRelationsButStacks: vitest.fn().mockResolvedValue([]),
@@ -36,6 +37,7 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getChangedDeltaSync: vitest.fn(),
upsertFile: vitest.fn(),
upsertFiles: vitest.fn(),
deleteFile: vitest.fn(),
deleteFiles: vitest.fn(),
detectOfflineExternalAssets: vitest.fn(),
filterNewExternalAssetPaths: vitest.fn(),
@@ -8,14 +8,22 @@ import {
Memory,
Partner,
Session,
SidecarWriteAsset,
User,
UserAdmin,
} from 'src/database';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { QueueStatisticsDto } from 'src/dtos/queue.dto';
import { AssetStatus, AssetType, AssetVisibility, MemoryType, Permission, UserMetadataKey, UserStatus } from 'src/enum';
import {
AssetFileType,
AssetStatus,
AssetType,
AssetVisibility,
MemoryType,
Permission,
UserMetadataKey,
UserStatus,
} from 'src/enum';
import { OnThisDayData, UserMetadataItem } from 'src/types';
import { v4, v7 } from 'uuid';

@@ -237,7 +245,6 @@ const assetFactory = (asset: Partial<MapAsset> = {}) => ({
originalFileName: 'IMG_123.jpg',
originalPath: `/data/12/34/IMG_123.jpg`,
ownerId: newUuid(),
sidecarPath: null,
stackId: null,
thumbhash: null,
type: AssetType.Image,
@@ -312,12 +319,17 @@ const versionHistoryFactory = () => ({
version: '1.123.45',
});

const assetSidecarWriteFactory = (asset: Partial<SidecarWriteAsset> = {}) => ({
const assetSidecarWriteFactory = () => ({
id: newUuid(),
sidecarPath: '/path/to/original-path.jpg.xmp',
originalPath: '/path/to/original-path.jpg.xmp',
tags: [],
...asset,
files: [
{
id: newUuid(),
path: '/path/to/original-path.jpg.xmp',
type: AssetFileType.Sidecar,
},
],
});

const assetOcrFactory = (
@@ -69,7 +69,7 @@
"@koddsson/eslint-plugin-tscompat": "^0.2.0",
"@socket.io/component-emitter": "^3.1.0",
"@sveltejs/adapter-static": "^3.0.8",
"@sveltejs/enhanced-img": "^0.8.0",
"@sveltejs/enhanced-img": "^0.9.0",
"@sveltejs/kit": "^2.27.1",
"@sveltejs/vite-plugin-svelte": "6.2.1",
"@tailwindcss/vite": "^4.1.7",

@@ -376,7 +376,7 @@
}
}}
>
{#snippet children({ feature }: { feature: Feature<Geometry, GeoJsonProperties> })}
{#snippet children({ feature }: { feature: Feature })}
{#if useLocationPin}
<Icon icon={mdiMapMarker} size="50px" class="text-primary -translate-y-[50%]" />
{:else}
@@ -62,8 +62,16 @@ export class TreeNode extends Map<string, TreeNode> {
const child = this.values().next().value!;
child.value = joinPaths(this.value, child.value);
child.parent = this.parent;
this.parent.delete(this.value);
this.parent.set(child.value, child);

const entries = Array.from(this.parent.entries());
this.parent.clear();
for (const [key, value] of entries) {
if (key === this.value) {
this.parent.set(child.value, child);
} else {
this.parent.set(key, value);
}
}
}

for (const child of this.values()) {
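One note on the TreeNode hunk above: a JavaScript Map iterates in insertion order, so the old delete-then-set moved the merged child to the end of its parent, reordering siblings; rebuilding the entry list keeps it in place. A self-contained demonstration with plain Maps (keys and values here are arbitrary):

// Old approach: delete + set pushes the renamed key to the end.
const naive = new Map([['a', 1], ['b', 2], ['c', 3]]);
naive.delete('b');
naive.set('b/x', 2);
console.log([...naive.keys()]); // ['a', 'c', 'b/x']

// New approach: rebuild the map so the renamed key keeps its position.
const ordered = new Map([['a', 1], ['b', 2], ['c', 3]]);
const entries = [...ordered.entries()];
ordered.clear();
for (const [key, value] of entries) {
  if (key === 'b') {
    ordered.set('b/x', value); // replaced in place
  } else {
    ordered.set(key, value);
  }
}
console.log([...ordered.keys()]); // ['a', 'b/x', 'c']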