Compare commits


2 Commits

Author           SHA1        Message                                                                       Date
Zack Pollard     5b0ea3397f  wip                                                                           2025-05-19 18:16:23 +01:00
Thomas R. Koll   14970c5539  chore: reverting to multiline commands in docker-compose.yml files (#17309)  2025-05-19 08:52:35 -05:00
26 changed files with 187 additions and 442 deletions

View File

@@ -77,12 +77,22 @@ services:
- 5432:5432
healthcheck:
test: >-
-pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
+pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
+Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
+--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
+echo "checksum failure count is $$Chksum";
+[ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: >-
-postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
+postgres
+-c shared_preload_libraries=vectors.so
+-c 'search_path="$$user", public, vectors'
+-c logging_collector=on
+-c max_wal_size=2GB
+-c shared_buffers=512MB
+-c wal_compression=on
restart: always
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
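Both forms of these commands are equivalent once the YAML is parsed: ">-" introduces a folded block scalar, which joins the lines that follow with single spaces and strips the trailing newline, so the multiline layout restored here (and again in the second compose file below) changes readability only. A minimal sketch of the folding behaviour, assuming the js-yaml package; the snippet and its values are illustrative, not part of this diff:

import { load } from 'js-yaml';

// '>-' folds the two indented lines into one space-joined string and
// drops the trailing newline, matching the original one-line command.
const doc = load(`
test: >-
  pg_isready || exit 1;
  echo "ok"
`) as { test: string };

console.log(doc.test); // 'pg_isready || exit 1; echo "ok"'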

View File

@@ -67,12 +67,22 @@ services:
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
healthcheck:
test: >-
-pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
+pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
+Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
+--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
+echo "checksum failure count is $$Chksum";
+[ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: >-
-postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
+postgres
+-c shared_preload_libraries=vectors.so
+-c 'search_path="$$user", public, vectors'
+-c logging_collector=on
+-c max_wal_size=2GB
+-c shared_buffers=512MB
+-c wal_compression=on
restart: always
volumes:

View File

@@ -122,9 +122,7 @@ Class | Method | HTTP request | Description
*DeprecatedApi* | [**getRandom**](doc//DeprecatedApi.md#getrandom) | **GET** /assets/random |
*DownloadApi* | [**downloadArchive**](doc//DownloadApi.md#downloadarchive) | **POST** /download/archive |
*DownloadApi* | [**getDownloadInfo**](doc//DownloadApi.md#getdownloadinfo) | **POST** /download/info |
-*DuplicatesApi* | [**deduplicateAll**](doc//DuplicatesApi.md#deduplicateall) | **POST** /duplicates/bulk/deduplicate |
*DuplicatesApi* | [**getAssetDuplicates**](doc//DuplicatesApi.md#getassetduplicates) | **GET** /duplicates |
-*DuplicatesApi* | [**keepAll**](doc//DuplicatesApi.md#keepall) | **POST** /duplicates/bulk/keep |
*FacesApi* | [**createFace**](doc//FacesApi.md#createface) | **POST** /faces |
*FacesApi* | [**deleteFace**](doc//FacesApi.md#deleteface) | **DELETE** /faces/{id} |
*FacesApi* | [**getFaces**](doc//FacesApi.md#getfaces) | **GET** /faces |
@@ -329,7 +327,6 @@ Class | Method | HTTP request | Description
- [CreateLibraryDto](doc//CreateLibraryDto.md)
- [CreateProfileImageResponseDto](doc//CreateProfileImageResponseDto.md)
- [DatabaseBackupConfig](doc//DatabaseBackupConfig.md)
-- [DeduplicateAllDto](doc//DeduplicateAllDto.md)
- [DownloadArchiveInfo](doc//DownloadArchiveInfo.md)
- [DownloadInfoDto](doc//DownloadInfoDto.md)
- [DownloadResponse](doc//DownloadResponse.md)

View File

@@ -122,7 +122,6 @@ part 'model/create_album_dto.dart';
part 'model/create_library_dto.dart';
part 'model/create_profile_image_response_dto.dart';
part 'model/database_backup_config.dart';
-part 'model/deduplicate_all_dto.dart';
part 'model/download_archive_info.dart';
part 'model/download_info_dto.dart';
part 'model/download_response.dart';

View File

@@ -16,45 +16,6 @@ class DuplicatesApi {
final ApiClient apiClient;
-/// Performs an HTTP 'POST /duplicates/bulk/deduplicate' operation and returns the [Response].
-/// Parameters:
-///
-/// * [DeduplicateAllDto] deduplicateAllDto (required):
-Future<Response> deduplicateAllWithHttpInfo(DeduplicateAllDto deduplicateAllDto,) async {
-// ignore: prefer_const_declarations
-final apiPath = r'/duplicates/bulk/deduplicate';
-// ignore: prefer_final_locals
-Object? postBody = deduplicateAllDto;
-final queryParams = <QueryParam>[];
-final headerParams = <String, String>{};
-final formParams = <String, String>{};
-const contentTypes = <String>['application/json'];
-return apiClient.invokeAPI(
-apiPath,
-'POST',
-queryParams,
-postBody,
-headerParams,
-formParams,
-contentTypes.isEmpty ? null : contentTypes.first,
-);
-}
-/// Parameters:
-///
-/// * [DeduplicateAllDto] deduplicateAllDto (required):
-Future<void> deduplicateAll(DeduplicateAllDto deduplicateAllDto,) async {
-final response = await deduplicateAllWithHttpInfo(deduplicateAllDto,);
-if (response.statusCode >= HttpStatus.badRequest) {
-throw ApiException(response.statusCode, await _decodeBodyBytes(response));
-}
-}
/// Performs an HTTP 'GET /duplicates' operation and returns the [Response].
Future<Response> getAssetDuplicatesWithHttpInfo() async {
// ignore: prefer_const_declarations
@@ -98,37 +59,4 @@ class DuplicatesApi {
}
return null;
}
-/// Performs an HTTP 'POST /duplicates/bulk/keep' operation and returns the [Response].
-Future<Response> keepAllWithHttpInfo() async {
-// ignore: prefer_const_declarations
-final apiPath = r'/duplicates/bulk/keep';
-// ignore: prefer_final_locals
-Object? postBody;
-final queryParams = <QueryParam>[];
-final headerParams = <String, String>{};
-final formParams = <String, String>{};
-const contentTypes = <String>[];
-return apiClient.invokeAPI(
-apiPath,
-'POST',
-queryParams,
-postBody,
-headerParams,
-formParams,
-contentTypes.isEmpty ? null : contentTypes.first,
-);
-}
-Future<void> keepAll() async {
-final response = await keepAllWithHttpInfo();
-if (response.statusCode >= HttpStatus.badRequest) {
-throw ApiException(response.statusCode, await _decodeBodyBytes(response));
-}
-}
}

View File

@@ -300,8 +300,6 @@ class ApiClient {
return CreateProfileImageResponseDto.fromJson(value);
case 'DatabaseBackupConfig':
return DatabaseBackupConfig.fromJson(value);
-case 'DeduplicateAllDto':
-return DeduplicateAllDto.fromJson(value);
case 'DownloadArchiveInfo':
return DownloadArchiveInfo.fromJson(value);
case 'DownloadInfoDto':

View File

@@ -18,6 +18,7 @@ class AllJobStatusResponseDto {
required this.duplicateDetection,
required this.faceDetection,
required this.facialRecognition,
+required this.integrityDatabaseCheck,
required this.library_,
required this.metadataExtraction,
required this.migration,
@@ -40,6 +41,8 @@ class AllJobStatusResponseDto {
JobStatusDto facialRecognition;
+JobStatusDto integrityDatabaseCheck;
JobStatusDto library_;
JobStatusDto metadataExtraction;
@@ -67,6 +70,7 @@ class AllJobStatusResponseDto {
other.duplicateDetection == duplicateDetection &&
other.faceDetection == faceDetection &&
other.facialRecognition == facialRecognition &&
+other.integrityDatabaseCheck == integrityDatabaseCheck &&
other.library_ == library_ &&
other.metadataExtraction == metadataExtraction &&
other.migration == migration &&
@@ -86,6 +90,7 @@ class AllJobStatusResponseDto {
(duplicateDetection.hashCode) +
(faceDetection.hashCode) +
(facialRecognition.hashCode) +
+(integrityDatabaseCheck.hashCode) +
(library_.hashCode) +
(metadataExtraction.hashCode) +
(migration.hashCode) +
@@ -98,7 +103,7 @@ class AllJobStatusResponseDto {
(videoConversion.hashCode);
@override
-String toString() => 'AllJobStatusResponseDto[backgroundTask=$backgroundTask, backupDatabase=$backupDatabase, duplicateDetection=$duplicateDetection, faceDetection=$faceDetection, facialRecognition=$facialRecognition, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';
+String toString() => 'AllJobStatusResponseDto[backgroundTask=$backgroundTask, backupDatabase=$backupDatabase, duplicateDetection=$duplicateDetection, faceDetection=$faceDetection, facialRecognition=$facialRecognition, integrityDatabaseCheck=$integrityDatabaseCheck, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -107,6 +112,7 @@ class AllJobStatusResponseDto {
json[r'duplicateDetection'] = this.duplicateDetection;
json[r'faceDetection'] = this.faceDetection;
json[r'facialRecognition'] = this.facialRecognition;
+json[r'integrityDatabaseCheck'] = this.integrityDatabaseCheck;
json[r'library'] = this.library_;
json[r'metadataExtraction'] = this.metadataExtraction;
json[r'migration'] = this.migration;
@@ -134,6 +140,7 @@ class AllJobStatusResponseDto {
duplicateDetection: JobStatusDto.fromJson(json[r'duplicateDetection'])!,
faceDetection: JobStatusDto.fromJson(json[r'faceDetection'])!,
facialRecognition: JobStatusDto.fromJson(json[r'facialRecognition'])!,
+integrityDatabaseCheck: JobStatusDto.fromJson(json[r'integrityDatabaseCheck'])!,
library_: JobStatusDto.fromJson(json[r'library'])!,
metadataExtraction: JobStatusDto.fromJson(json[r'metadataExtraction'])!,
migration: JobStatusDto.fromJson(json[r'migration'])!,
@@ -196,6 +203,7 @@ class AllJobStatusResponseDto {
'duplicateDetection',
'faceDetection',
'facialRecognition',
+'integrityDatabaseCheck',
'library',
'metadataExtraction',
'migration',

View File

@@ -1,101 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class DeduplicateAllDto {
/// Returns a new [DeduplicateAllDto] instance.
DeduplicateAllDto({
this.assetIdsToKeep = const [],
});
List<String> assetIdsToKeep;
@override
bool operator ==(Object other) => identical(this, other) || other is DeduplicateAllDto &&
_deepEquality.equals(other.assetIdsToKeep, assetIdsToKeep);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(assetIdsToKeep.hashCode);
@override
String toString() => 'DeduplicateAllDto[assetIdsToKeep=$assetIdsToKeep]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'assetIdsToKeep'] = this.assetIdsToKeep;
return json;
}
/// Returns a new [DeduplicateAllDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static DeduplicateAllDto? fromJson(dynamic value) {
upgradeDto(value, "DeduplicateAllDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return DeduplicateAllDto(
assetIdsToKeep: json[r'assetIdsToKeep'] is Iterable
? (json[r'assetIdsToKeep'] as Iterable).cast<String>().toList(growable: false)
: const [],
);
}
return null;
}
static List<DeduplicateAllDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <DeduplicateAllDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = DeduplicateAllDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, DeduplicateAllDto> mapFromJson(dynamic json) {
final map = <String, DeduplicateAllDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = DeduplicateAllDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of DeduplicateAllDto-objects as value to a dart map
static Map<String, List<DeduplicateAllDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<DeduplicateAllDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = DeduplicateAllDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'assetIdsToKeep',
};
}

View File

@@ -38,6 +38,7 @@ class JobName {
static const library_ = JobName._(r'library');
static const notifications = JobName._(r'notifications');
static const backupDatabase = JobName._(r'backupDatabase');
+static const integrityDatabaseCheck = JobName._(r'integrityDatabaseCheck');
/// List of all possible values in this [enum][JobName].
static const values = <JobName>[
@@ -56,6 +57,7 @@ class JobName {
library_,
notifications,
backupDatabase,
+integrityDatabaseCheck,
];
static JobName? fromJson(dynamic value) => JobNameTypeTransformer().decode(value);
@@ -109,6 +111,7 @@ class JobNameTypeTransformer {
case r'library': return JobName.library_;
case r'notifications': return JobName.notifications;
case r'backupDatabase': return JobName.backupDatabase;
+case r'integrityDatabaseCheck': return JobName.integrityDatabaseCheck;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');

View File

@@ -29,6 +29,7 @@ class ManualJobName {
static const memoryCleanup = ManualJobName._(r'memory-cleanup');
static const memoryCreate = ManualJobName._(r'memory-create');
static const backupDatabase = ManualJobName._(r'backup-database');
+static const integrityDatabaseCheck = ManualJobName._(r'integrity-database-check');
/// List of all possible values in this [enum][ManualJobName].
static const values = <ManualJobName>[
@@ -38,6 +39,7 @@ class ManualJobName {
memoryCleanup,
memoryCreate,
backupDatabase,
+integrityDatabaseCheck,
];
static ManualJobName? fromJson(dynamic value) => ManualJobNameTypeTransformer().decode(value);
@@ -82,6 +84,7 @@ class ManualJobNameTypeTransformer {
case r'memory-cleanup': return ManualJobName.memoryCleanup;
case r'memory-create': return ManualJobName.memoryCreate;
case r'backup-database': return ManualJobName.backupDatabase;
+case r'integrity-database-check': return ManualJobName.integrityDatabaseCheck;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');

View File

@@ -2732,66 +2732,6 @@
]
}
},
"/duplicates/bulk/deduplicate": {
"post": {
"operationId": "deduplicateAll",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/DeduplicateAllDto"
}
}
},
"required": true
},
"responses": {
"201": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Duplicates"
]
}
},
"/duplicates/bulk/keep": {
"post": {
"operationId": "keepAll",
"parameters": [],
"responses": {
"201": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Duplicates"
]
}
},
"/faces": {
"get": {
"operationId": "getFaces",
@@ -8592,6 +8532,9 @@
"facialRecognition": {
"$ref": "#/components/schemas/JobStatusDto"
},
"integrityDatabaseCheck": {
"$ref": "#/components/schemas/JobStatusDto"
},
"library": {
"$ref": "#/components/schemas/JobStatusDto"
},
@@ -8629,6 +8572,7 @@
"duplicateDetection",
"faceDetection",
"facialRecognition",
"integrityDatabaseCheck",
"library",
"metadataExtraction",
"migration",
@@ -9715,21 +9659,6 @@
],
"type": "object"
},
"DeduplicateAllDto": {
"properties": {
"assetIdsToKeep": {
"items": {
"format": "uuid",
"type": "string"
},
"type": "array"
}
},
"required": [
"assetIdsToKeep"
],
"type": "object"
},
"DownloadArchiveInfo": {
"properties": {
"assetIds": {
@@ -10176,7 +10105,8 @@
"sidecar",
"library",
"notifications",
"backupDatabase"
"backupDatabase",
"integrityDatabaseCheck"
],
"type": "string"
},
@@ -10411,7 +10341,8 @@
"user-cleanup",
"memory-cleanup",
"memory-create",
"backup-database"
"backup-database",
"integrity-database-check"
],
"type": "string"
},

View File

@@ -560,9 +560,6 @@ export type DuplicateResponseDto = {
assets: AssetResponseDto[];
duplicateId: string;
};
-export type DeduplicateAllDto = {
-assetIdsToKeep: string[];
-};
export type PersonResponseDto = {
birthDate: string | null;
/** This property was added in v1.126.0 */
@@ -625,6 +622,7 @@ export type AllJobStatusResponseDto = {
duplicateDetection: JobStatusDto;
faceDetection: JobStatusDto;
facialRecognition: JobStatusDto;
+integrityDatabaseCheck: JobStatusDto;
library: JobStatusDto;
metadataExtraction: JobStatusDto;
migration: JobStatusDto;
@@ -2179,21 +2177,6 @@ export function getAssetDuplicates(opts?: Oazapfts.RequestOpts) {
...opts
}));
}
-export function deduplicateAll({ deduplicateAllDto }: {
-deduplicateAllDto: DeduplicateAllDto;
-}, opts?: Oazapfts.RequestOpts) {
-return oazapfts.ok(oazapfts.fetchText("/duplicates/bulk/deduplicate", oazapfts.json({
-...opts,
-method: "POST",
-body: deduplicateAllDto
-})));
-}
-export function keepAll(opts?: Oazapfts.RequestOpts) {
-return oazapfts.ok(oazapfts.fetchText("/duplicates/bulk/keep", {
-...opts,
-method: "POST"
-}));
-}
export function getFaces({ id }: {
id: string;
}, opts?: Oazapfts.RequestOpts) {
@@ -3807,7 +3790,8 @@ export enum ManualJobName {
UserCleanup = "user-cleanup",
MemoryCleanup = "memory-cleanup",
MemoryCreate = "memory-create",
BackupDatabase = "backup-database"
BackupDatabase = "backup-database",
IntegrityDatabaseCheck = "integrity-database-check"
}
export enum JobName {
ThumbnailGeneration = "thumbnailGeneration",
@@ -3824,7 +3808,8 @@ export enum JobName {
Sidecar = "sidecar",
Library = "library",
Notifications = "notifications",
BackupDatabase = "backupDatabase"
BackupDatabase = "backupDatabase",
IntegrityDatabaseCheck = "integrityDatabaseCheck"
}
export enum JobCommand {
Start = "start",

View File

@@ -1,8 +1,7 @@
-import { Body, Controller, Get, Post } from '@nestjs/common';
+import { Controller, Get } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { AuthDto } from 'src/dtos/auth.dto';
-import { DeduplicateAllDto, DuplicateResponseDto } from 'src/dtos/duplicate.dto';
-import { Permission } from 'src/enum';
+import { DuplicateResponseDto } from 'src/dtos/duplicate.dto';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { DuplicateService } from 'src/services/duplicate.service';
@@ -16,16 +15,4 @@ export class DuplicateController {
getAssetDuplicates(@Auth() auth: AuthDto): Promise<DuplicateResponseDto[]> {
return this.service.getDuplicates(auth);
}
-@Post('/bulk/keep')
-@Authenticated({ permission: Permission.ASSET_UPDATE })
-async keepAll(@Auth() auth: AuthDto) {
-await this.service.keepAll(auth);
-}
-@Post('/bulk/deduplicate')
-@Authenticated({ permission: Permission.ASSET_DELETE })
-async deduplicateAll(@Auth() auth: AuthDto, @Body() dto: DeduplicateAllDto) {
-await this.service.deduplicateAll(auth, dto);
-}
}

View File

@@ -12,9 +12,3 @@ export class ResolveDuplicatesDto {
@ValidateUUID({ each: true })
assetIds!: string[];
}
-export class DeduplicateAllDto {
-@IsNotEmpty()
-@ValidateUUID({ each: true })
-assetIdsToKeep!: string[];
-}

View File

@@ -99,4 +99,7 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>
@ApiProperty({ type: JobStatusDto })
[QueueName.BACKUP_DATABASE]!: JobStatusDto;
+@ApiProperty({ type: JobStatusDto })
+[QueueName.DATABASE_INTEGRITY_CHECK]!: JobStatusDto;
}

View File

@@ -251,6 +251,7 @@ export enum ManualJobName {
MEMORY_CLEANUP = 'memory-cleanup',
MEMORY_CREATE = 'memory-create',
BACKUP_DATABASE = 'backup-database',
+INTEGRITY_DATABASE_CHECK = 'integrity-database-check',
}
export enum AssetPathType {
@@ -441,6 +442,7 @@ export enum QueueName {
LIBRARY = 'library',
NOTIFICATION = 'notifications',
BACKUP_DATABASE = 'backupDatabase',
+DATABASE_INTEGRITY_CHECK = 'integrityDatabaseCheck',
}
export enum JobName {
@@ -532,6 +534,9 @@ export enum JobName {
// Version check
VERSION_CHECK = 'version-check',
+// Integrity
+DATABASE_INTEGRITY_CHECK = 'database-integrity-check',
}
export enum JobCommand {

View File

@@ -463,3 +463,12 @@ where
and "libraryId" = $2::uuid
and "isExternal" = $3
)
+-- AssetRepository.integrityCheckExif
+select
+"id"
+from
+"assets"
+left join "exif" on "assets"."id" = "exif"."assetId"
+where
+"exif"."assetId" is null

View File

@@ -146,17 +146,10 @@ export class AssetJobRepository {
@GenerateSql({ params: [], stream: true })
streamForSearchDuplicates(force?: boolean) {
-return this.db
-.selectFrom('assets')
+return this.assetsWithPreviews()
+.where((eb) => eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id'))))
+.$if(!force, (qb) => qb.where('job_status.duplicatesDetectedAt', 'is', null))
.select(['assets.id'])
-.where('assets.visibility', '!=', AssetVisibility.HIDDEN)
-.where('assets.deletedAt', 'is', null)
-.innerJoin('smart_search', 'assets.id', 'smart_search.assetId')
-.$if(!force, (qb) =>
-qb
-.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
-.where('job_status.duplicatesDetectedAt', 'is', null),
-)
.stream();
}
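One thing worth flagging in review: the old query inner-joined smart_search, so it walked assets that already have an embedding, while the rewrite keeps assets for which no smart_search row exists and defers the base filters to an assetsWithPreviews() helper that is not shown in this hunk. Given the wip commit message the inversion may be deliberate, but it changes which assets get queued for duplicate detection.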

View File

@@ -632,100 +632,57 @@ export class AssetRepository {
@GenerateSql({ params: [DummyValue.UUID] })
getDuplicates(userId: string) {
-return this.db
-.with('duplicates', (qb) =>
-qb
-.selectFrom('assets')
-.innerJoin('exif', 'assets.id', 'exif.assetId')
-.leftJoinLateral(
-(qb) =>
-qb
-.selectFrom(sql`(select 1)`.as('dummy'))
-.selectAll('assets')
-.select((eb) => eb.table('exif').as('exifInfo'))
-.as('asset'),
-(join) => join.onTrue(),
-)
-.select('assets.duplicateId')
-.select((eb) => eb.fn.jsonAgg('asset').$castTo<MapAsset[]>().as('assets'))
-.where('assets.ownerId', '=', asUuid(userId))
-.where('assets.duplicateId', 'is not', null)
-.$narrowType<{ duplicateId: NotNull }>()
-.where('assets.deletedAt', 'is', null)
-.where('assets.visibility', '!=', AssetVisibility.HIDDEN)
-.where('assets.stackId', 'is', null)
-.groupBy('assets.duplicateId'),
-)
-.with('unique', (qb) =>
-qb
-.selectFrom('duplicates')
-.select('duplicateId')
-.where((eb) => eb(eb.fn('json_array_length', ['assets']), '=', 1)),
-)
-.with('removed_unique', (qb) =>
-qb
-.updateTable('assets')
-.set({ duplicateId: null })
-.from('unique')
-.whereRef('assets.duplicateId', '=', 'unique.duplicateId'),
-)
-.selectFrom('duplicates')
-.selectAll()
-.where(({ not, exists }) =>
-not(exists((eb) => eb.selectFrom('unique').whereRef('unique.duplicateId', '=', 'duplicates.duplicateId'))),
-)
-.execute();
-}
-@GenerateSql({ params: [DummyValue.UUID] })
-streamDuplicates(userId: string) {
-return this.db
-.selectFrom('assets')
-.innerJoin('exif', 'assets.id', 'exif.assetId')
-.innerJoinLateral(
-(qb) =>
+return (
+this.db
+.with('duplicates', (qb) =>
qb
-.selectFrom(sql`(select 1)`.as('dummy'))
-.selectAll('assets')
-.select((eb) => eb.table('exif').as('exifInfo'))
-.as('asset'),
-(join) => join.onTrue(),
-)
-.select('assets.duplicateId')
-.select((eb) => eb.fn.jsonAgg('asset').as('assets'))
-.where('assets.ownerId', '=', asUuid(userId))
-.where('assets.duplicateId', 'is not', null)
-.$narrowType<{ duplicateId: NotNull }>()
-.where('assets.deletedAt', 'is', null)
-.where('assets.visibility', '!=', AssetVisibility.HIDDEN)
-.where('assets.stackId', 'is', null)
-.groupBy('assets.duplicateId')
-.stream();
-}
-@GenerateSql({ params: [DummyValue.UUID] })
-keepAllDuplicates(userId: string) {
-return this.db
-.updateTable('assets')
-.set({ duplicateId: null })
-.where('duplicateId', 'is not', null)
-.where('ownerId', '=', userId)
-.execute();
-}
-deduplicateAll(userId: string, keptAssetIds: string[], deduplicatedStatus: AssetStatus) {
-return this.db
-.with('kept', (qb) =>
-// anyUuid ensures the array is passed as a single parameter, so no need to chunk
-qb.updateTable('assets').set({ duplicateId: null }).where('id', '=', anyUuid(keptAssetIds)).returning('id'),
-)
-.updateTable('assets')
-.from('kept')
-.set({ duplicateId: null, status: deduplicatedStatus })
-.whereRef('id', '!=', 'kept.id')
-.where('duplicateId', 'is not', null)
-.where('ownerId', '=', userId)
-.execute();
+.selectFrom('assets')
+.leftJoinLateral(
+(qb) =>
+qb
+.selectFrom('exif')
+.selectAll('assets')
+.select((eb) => eb.table('exif').as('exifInfo'))
+.whereRef('exif.assetId', '=', 'assets.id')
+.as('asset'),
+(join) => join.onTrue(),
+)
+.select('assets.duplicateId')
+.select((eb) =>
+eb
+.fn('jsonb_agg', [eb.table('asset')])
+.$castTo<MapAsset[]>()
+.as('assets'),
+)
+.where('assets.ownerId', '=', asUuid(userId))
+.where('assets.duplicateId', 'is not', null)
+.$narrowType<{ duplicateId: NotNull }>()
+.where('assets.deletedAt', 'is', null)
+.where('assets.visibility', '!=', AssetVisibility.HIDDEN)
+.where('assets.stackId', 'is', null)
+.groupBy('assets.duplicateId'),
+)
+.with('unique', (qb) =>
+qb
+.selectFrom('duplicates')
+.select('duplicateId')
+.where((eb) => eb(eb.fn('jsonb_array_length', ['assets']), '=', 1)),
+)
+.with('removed_unique', (qb) =>
+qb
+.updateTable('assets')
+.set({ duplicateId: null })
+.from('unique')
+.whereRef('assets.duplicateId', '=', 'unique.duplicateId'),
+)
+.selectFrom('duplicates')
+.selectAll()
+// TODO: compare with filtering by jsonb_array_length > 1
+.where(({ not, exists }) =>
+not(exists((eb) => eb.selectFrom('unique').whereRef('unique.duplicateId', '=', 'duplicates.duplicateId'))),
+)
+.execute()
+);
+}
@GenerateSql({ params: [DummyValue.UUID, { minAssetsPerField: 5, maxFields: 12 }] })
@@ -918,4 +875,16 @@ export class AssetRepository {
return count;
}
+@GenerateSql()
+async integrityCheckExif(): Promise<string[]> {
+const result = await this.db
+.selectFrom('assets')
+.select('id')
+.leftJoin('exif', 'assets.id', 'exif.assetId')
+.where('exif.assetId', 'is', null)
+.execute();
+return result.map((row) => row.id);
+}
}
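In summary, AssetRepository changes in three ways: getDuplicates replaces the (select 1) dummy-table lateral join plus inner join on exif with a single correlated lateral select on exif, switches from json_agg/json_array_length to their jsonb counterparts, and picks up a TODO about filtering on jsonb_array_length instead of the NOT EXISTS; streamDuplicates, keepAllDuplicates, and deduplicateAll are deleted along with the endpoints that called them; and integrityCheckExif arrives as the Kysely form of the anti-join recorded in the SQL snapshot above.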

View File

@@ -3,8 +3,8 @@ import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { OnJob } from 'src/decorators';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
-import { DeduplicateAllDto, DuplicateResponseDto } from 'src/dtos/duplicate.dto';
-import { AssetFileType, AssetStatus, AssetVisibility, JobName, JobStatus, QueueName } from 'src/enum';
+import { DuplicateResponseDto } from 'src/dtos/duplicate.dto';
+import { AssetFileType, AssetVisibility, JobName, JobStatus, QueueName } from 'src/enum';
import { AssetDuplicateResult } from 'src/repositories/search.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
@@ -21,20 +21,6 @@ export class DuplicateService extends BaseService {
}));
}
-keepAll(auth: AuthDto) {
-return this.assetRepository.keepAllDuplicates(auth.user.id);
-}
-async deduplicateAll(auth: AuthDto, dto: DeduplicateAllDto) {
-if (dto.assetIdsToKeep.length === 0) {
-return;
-}
-const { trash } = await this.getConfig({ withCache: false });
-const deduplicatedStatus = trash.enabled ? AssetStatus.TRASHED : AssetStatus.DELETED;
-return this.assetRepository.deduplicateAll(auth.user.id, dto.assetIdsToKeep, deduplicatedStatus);
-}
@OnJob({ name: JobName.QUEUE_DUPLICATE_DETECTION, queue: QueueName.DUPLICATE_DETECTION })
async handleQueueSearchDuplicates({ force }: JobOf<JobName.QUEUE_DUPLICATE_DETECTION>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: false });
@@ -43,16 +29,20 @@ export class DuplicateService extends BaseService {
}
let jobs: JobItem[] = [];
+const queueAll = async () => {
+await this.jobRepository.queueAll(jobs);
+jobs = [];
+};
const assets = this.assetJobRepository.streamForSearchDuplicates(force);
for await (const asset of assets) {
jobs.push({ name: JobName.DUPLICATE_DETECTION, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
-await this.jobRepository.queueAll(jobs);
-jobs = [];
+await queueAll();
}
}
-await this.jobRepository.queueAll(jobs);
+await queueAll();
return JobStatus.SUCCESS;
}

View File

@@ -11,6 +11,7 @@ import { CliService } from 'src/services/cli.service';
import { DatabaseService } from 'src/services/database.service';
import { DownloadService } from 'src/services/download.service';
import { DuplicateService } from 'src/services/duplicate.service';
+import { IntegrityService } from 'src/services/integrity.service';
import { JobService } from 'src/services/job.service';
import { LibraryService } from 'src/services/library.service';
import { MapService } from 'src/services/map.service';
@@ -54,6 +55,7 @@ export const services = [
DatabaseService,
DownloadService,
DuplicateService,
+IntegrityService,
JobService,
LibraryService,
MapService,

View File

@@ -0,0 +1,13 @@
+import { Injectable } from '@nestjs/common';
+import { OnJob } from 'src/decorators';
+import { JobName, JobStatus, QueueName } from 'src/enum';
+import { BaseService } from 'src/services/base.service';
+@Injectable()
+export class IntegrityService extends BaseService {
+@OnJob({ name: JobName.DATABASE_INTEGRITY_CHECK, queue: QueueName.DATABASE_INTEGRITY_CHECK })
+async handleDatabaseIntegrityCheck(): Promise<JobStatus> {
+console.log(JSON.stringify(await this.assetRepository.integrityCheckExif()));
+return JobStatus.SUCCESS;
+}
+}
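As committed, the service is a stub consistent with the wip commit message: the handler runs the exif anti-join and dumps the orphaned asset IDs with console.log rather than the service logger, and nothing consumes the result yet.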

View File

@@ -46,6 +46,10 @@ const asJobItem = (dto: JobCreateDto): JobItem => {
return { name: JobName.BACKUP_DATABASE };
}
+case ManualJobName.INTEGRITY_DATABASE_CHECK: {
+return { name: JobName.DATABASE_INTEGRITY_CHECK };
+}
default: {
throw new BadRequestException('Invalid job name');
}
@@ -228,6 +232,7 @@ export class JobService extends BaseService {
QueueName.STORAGE_TEMPLATE_MIGRATION,
QueueName.DUPLICATE_DETECTION,
QueueName.BACKUP_DATABASE,
+QueueName.DATABASE_INTEGRITY_CHECK,
].includes(name);
}
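For context, this is the path a manual run takes: the admin UI posts a JobCreateDto, and asJobItem translates the manual name into the internal JobName before queueing. A hedged sketch from the SDK side, assuming the POST /jobs operation is generated as createJob (only ManualJobName itself is confirmed by this diff):

import { createJob, ManualJobName } from '@immich/sdk';

// Posts a JobCreateDto; the server-side asJobItem mapping above turns
// the manual name into JobName.DATABASE_INTEGRITY_CHECK and queues it.
await createJob({ jobCreateDto: { name: ManualJobName.IntegrityDatabaseCheck } });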

View File

@@ -164,6 +164,7 @@ export type ConcurrentQueueName = Exclude<
| QueueName.FACIAL_RECOGNITION
| QueueName.DUPLICATE_DETECTION
| QueueName.BACKUP_DATABASE
+| QueueName.DATABASE_INTEGRITY_CHECK
>;
export type Jobs = { [K in JobItem['name']]: (JobItem & { name: K })['data'] };
@@ -363,9 +364,8 @@ export type JobItem =
// Version check
| { name: JobName.VERSION_CHECK; data: IBaseJob }
// Memories
| { name: JobName.MEMORIES_CLEANUP; data?: IBaseJob }
-| { name: JobName.MEMORIES_CREATE; data?: IBaseJob };
+| { name: JobName.MEMORIES_CREATE; data?: IBaseJob }
+// Integrity
+| { name: JobName.DATABASE_INTEGRITY_CHECK; data?: IBaseJob };
export type VectorExtension = DatabaseExtension.VECTOR | DatabaseExtension.VECTORS;

View File

@@ -20,6 +20,7 @@
{ title: $t('admin.memory_cleanup_job'), value: ManualJobName.MemoryCleanup },
{ title: $t('admin.memory_generate_job'), value: ManualJobName.MemoryCreate },
{ title: $t('admin.backup_database'), value: ManualJobName.BackupDatabase },
+{ title: 'integrity test', value: ManualJobName.IntegrityDatabaseCheck },
].map(({ value, title }) => ({ id: value, label: title, value }));
let selectedJob: ComboBoxOption | undefined = $state(undefined);

View File

@@ -15,7 +15,7 @@
import { suggestDuplicate } from '$lib/utils/duplicate-utils';
import { handleError } from '$lib/utils/handle-error';
import type { AssetResponseDto } from '@immich/sdk';
-import { deduplicateAll, deleteAssets, keepAll, updateAssets } from '@immich/sdk';
+import { deleteAssets, updateAssets } from '@immich/sdk';
import { Button, HStack, IconButton, Text } from '@immich/ui';
import { mdiCheckOutline, mdiInformationOutline, mdiKeyboard, mdiTrashCanOutline } from '@mdi/js';
import { t } from 'svelte-i18n';
@@ -101,30 +101,33 @@
};
const handleDeduplicateAll = async () => {
-let assetCount = 0;
-const assetIdsToKeep = duplicates.map((group) => suggestDuplicate(group.assets)!.id);
-for (const group of duplicates) {
-assetCount += group.assets.length;
-assetIdsToKeep.push(suggestDuplicate(group.assets)!.id);
-}
-const dedupedAssetCount = assetCount - assetIdsToKeep.length;
+const idsToKeep = duplicates.map((group) => suggestDuplicate(group.assets)).map((asset) => asset?.id);
+const idsToDelete = duplicates.flatMap((group, i) =>
+group.assets.map((asset) => asset.id).filter((asset) => asset !== idsToKeep[i]),
+);
let prompt, confirmText;
if ($featureFlags.trash) {
-prompt = $t('bulk_trash_duplicates_confirmation', { values: { count: dedupedAssetCount } });
+prompt = $t('bulk_trash_duplicates_confirmation', { values: { count: idsToDelete.length } });
confirmText = $t('confirm');
} else {
-prompt = $t('bulk_delete_duplicates_confirmation', { values: { count: dedupedAssetCount } });
+prompt = $t('bulk_delete_duplicates_confirmation', { values: { count: idsToDelete.length } });
confirmText = $t('permanently_delete');
}
return withConfirmation(
async () => {
-await deduplicateAll({ deduplicateAllDto: { assetIdsToKeep } });
+await deleteAssets({ assetBulkDeleteDto: { ids: idsToDelete, force: !$featureFlags.trash } });
+await updateAssets({
+assetBulkUpdateDto: {
+ids: [...idsToDelete, ...idsToKeep.filter((id): id is string => !!id)],
+duplicateId: null,
+},
+});
duplicates = [];
-deletedNotification(dedupedAssetCount);
+deletedNotification(idsToDelete.length);
},
prompt,
confirmText,
@@ -132,10 +135,10 @@
};
const handleKeepAll = async () => {
-const assetCount = duplicates.reduce((acc, cur) => acc + cur.assets.length, 0);
+const ids = duplicates.flatMap((group) => group.assets.map((asset) => asset.id));
return withConfirmation(
async () => {
-await keepAll();
+await updateAssets({ assetBulkUpdateDto: { ids, duplicateId: null } });
duplicates = [];
@@ -144,7 +147,7 @@
type: NotificationType.Info,
});
},
-$t('bulk_keep_duplicates_confirmation', { values: { count: assetCount } }),
+$t('bulk_keep_duplicates_confirmation', { values: { count: ids.length } }),
$t('confirm'),
);
};
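With the bulk endpoints gone, the page rebuilds both actions from generic asset calls: deduplicate-all becomes deleteAssets on the non-kept assets followed by an updateAssets that clears duplicateId, and keep-all becomes the duplicateId reset alone. A condensed sketch using only the @immich/sdk functions visible in this diff (confirmation and error handling omitted):

import { deleteAssets, updateAssets } from '@immich/sdk';

async function deduplicateClientSide(idsToKeep: string[], idsToDelete: string[], trashEnabled: boolean) {
  // Trash (or force-delete) every asset that was not chosen to keep...
  await deleteAssets({ assetBulkDeleteDto: { ids: idsToDelete, force: !trashEnabled } });
  // ...then detach all affected assets from their duplicate groups.
  await updateAssets({ assetBulkUpdateDto: { ids: [...idsToDelete, ...idsToKeep], duplicateId: null } });
}

async function keepAllClientSide(ids: string[]) {
  // Keeping everything is just clearing duplicateId on every grouped asset.
  await updateAssets({ assetBulkUpdateDto: { ids, duplicateId: null } });
}

Unlike the removed server-side implementation, this takes two round trips with no shared transaction, so a failure between the calls can leave assets deleted but still grouped.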