Compare commits

..

44 Commits

Author SHA1 Message Date
mertalev
b59c4ddc9c upload asset button 2025-10-13 08:35:49 -04:00
mertalev
8128876472 remove upload-length from conventional upload e2e 2025-10-12 19:44:42 -04:00
mertalev
e9af3bf2fe linting 2025-10-12 19:39:56 -04:00
mertalev
87ca5e7b1d redundant check 2025-10-12 19:33:22 -04:00
mertalev
0d3cc89ba0 update api 2025-10-12 19:29:11 -04:00
mertalev
3e6a4f2417 lint 2025-10-12 19:21:41 -04:00
mertalev
63ff2e22d0 support conventional uploads 2025-10-12 19:18:52 -04:00
mertalev
7a32eb699c require header for incomplete uploads 2025-10-10 21:10:26 -04:00
mertalev
85a3854208 reject empty file 2025-10-10 21:08:16 -04:00
mertalev
504d8dc96c infer upload length when possible 2025-10-10 20:59:53 -04:00
mertalev
b0aa68d83a update api 2025-10-10 19:35:18 -04:00
mertalev
0ad983135c set max-age limit 2025-10-10 19:26:22 -04:00
mertalev
da52b3ebf4 add live photo e2e 2025-10-09 20:03:06 -04:00
mertalev
0be3b06a2a better abort check 2025-10-09 20:03:06 -04:00
mertalev
325f30815c unnecessary change 2025-10-09 20:03:06 -04:00
mertalev
ae2abb3cfe configurable cleanup 2025-10-09 20:03:06 -04:00
mertalev
883eb15ecb handle live photos 2025-10-09 20:03:06 -04:00
mertalev
a3d10ff46a tweak types 2025-10-09 20:03:06 -04:00
mertalev
38d2a03836 MUST NOT validation 2025-10-09 20:03:06 -04:00
mertalev
b1a2e7708e remove log 2025-10-09 20:03:06 -04:00
mertalev
57ea75bfc9 lint 2025-10-09 20:03:06 -04:00
mertalev
c295a48061 test interruption + abort 2025-10-09 20:03:06 -04:00
mertalev
ebda00fcf0 more content length test inputs 2025-10-09 20:03:06 -04:00
mertalev
4d04f80425 fix abortion return 2025-10-09 20:03:06 -04:00
mertalev
b68f70f28b typo 2025-10-09 20:03:06 -04:00
mertalev
758553672a proactive abortion 2025-10-09 20:03:06 -04:00
mertalev
1915e3ceb2 better content length handling 2025-10-09 20:03:06 -04:00
mertalev
0db8c10601 add timeout 2025-10-09 20:03:06 -04:00
mertalev
12b1a319e9 tidying 2025-10-09 20:03:06 -04:00
mertalev
6dbcf8b876 listen to upload event in e2e
test resume with real image
2025-10-09 20:03:06 -04:00
mertalev
484b73eb60 add service tests 2025-10-09 20:03:06 -04:00
mertalev
d4f3d9d6a5 add controller tests, move validation testing from e2e
revert unnecessary change

update mocks

add structured-headers to e2e deps
2025-10-09 20:03:06 -04:00
mertalev
597382a25f add note about RFC 9651
authdto

remove excess logs

use structured dictionary
2025-10-09 20:03:06 -04:00
mertalev
0105c9e2b6 clean up stale uploads
stale upload cleanup

try/catch file check
2025-10-09 20:03:06 -04:00
mertalev
071dbc1c50 unnecessary quota check 2025-10-09 20:03:06 -04:00
mertalev
97185964cb interim+500
interim+500

interim+500
2025-10-09 20:03:06 -04:00
mertalev
9f3a9030c7 more e2e tests
consistent e2e sections

decrement quota on cancel
2025-10-09 20:03:06 -04:00
mertalev
0a955e21b6 tweaks
shared pipe method

shared pipe method

require size upfront

make length optional for patch requests
2025-10-09 20:03:06 -04:00
mertalev
fb192bd310 ensure stream is closed before releasing lock 2025-10-09 20:03:06 -04:00
mertalev
a39f3f765d dto refactor
add logging

handle metadata
2025-10-09 20:03:06 -04:00
mertalev
35d3802219 backward compatibility 2025-10-09 20:03:06 -04:00
mertalev
026e367609 working e2e 2025-10-09 20:03:06 -04:00
mertalev
b3e5a381a8 interop v8 compliance 2025-10-09 20:03:06 -04:00
mertalev
7f50f268a5 chunked upload controller 2025-10-09 20:03:05 -04:00
68 changed files with 4607 additions and 103 deletions

View File

@@ -50,7 +50,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/autobuild@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: '/language:${{matrix.language}}'

View File

@@ -124,7 +124,7 @@ jobs:
tag-suffix: '-rocm'
platforms: linux/amd64
runner-mapping: '{"linux/amd64": "mich"}'
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
permissions:
contents: read
actions: read
@@ -147,7 +147,7 @@ jobs:
name: Build and Push Server
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
permissions:
contents: read
actions: read

View File

@@ -31,8 +31,6 @@ jobs:
- 'open-api/immich-openapi-specs.json'
force-filters: |
- '.github/workflows/docs-build.yml'
- '.github/workflows/docs-deploy.yml'
- 'deployment/**'
force-events: 'release'
force-branches: 'main'

View File

@@ -128,7 +128,7 @@ jobs:
name: release-apk-signed
- name: Create draft release
uses: softprops/action-gh-release@aec2ec56f94eb8180ceec724245f64ef008b89f5 # v2.4.0
uses: softprops/action-gh-release@6cbd405e2c4e67a21c47fa9e383d020e4e28b836 # v2.3.3
with:
draft: true
tag_name: ${{ env.IMMICH_VERSION }}

View File

@@ -581,7 +581,7 @@ jobs:
contents: read
services:
postgres:
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:dbf18b3ffea4a81434c65b71e20d27203baf903a0275f4341e4c16dfd901fd67
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:da52bbead5d818adaa8077c8dcdaad0aaf93038c31ad8348b51f9f0ec1310a4d
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres

View File

@@ -22,7 +22,7 @@ For organizations seeking to resell Immich, we have established the following gu
- Do not misrepresent your reseller site or services as being officially affiliated with or endorsed by Immich or our development team.
- For small resellers who wish to contribute financially to Immich's development, we recommend directing your customers to purchase licenses directly from us rather than attempting to broker revenue-sharing arrangements. We ask that you refrain from misrepresenting reseller activities as directly supporting our development work.
- For small resellers who wish to contribute financially to Immich's development, we recommend directing your customers to purchase licenses directy from us rather than attempting to broker revenue-sharing arrangements. We ask that you refrain from misrepresenting reseller activities as directly supporting our development work.
When in doubt or if you have an edge case scenario, we encourage you to contact us directly via email to discuss the use of our trademark. We can provide clear guidance on what is acceptable and what is not. You can reach out at: questions@immich.app

View File

@@ -4,7 +4,7 @@ Immich supports the Google's Cast protocol so that photos and videos can be cast
## Enable Google Cast Support
Google Cast support is disabled by default. The web UI uses Google-provided scripts and must retrieve them from Google servers when the page loads. This is a privacy concern for some and is thus opt-in.
Google Cast support is disabled by default. The web UI uses Google-provided scripts and must retreive them from Google servers when the page loads. This is a privacy concern for some and is thus opt-in.
You can enable Google Cast support through `Account Settings > Features > Cast > Google Cast`

View File

@@ -53,5 +53,8 @@
},
"volta": {
"node": "22.20.0"
},
"dependencies": {
"structured-headers": "^2.0.2"
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -561,6 +561,16 @@ export const utils = {
await utils.waitForQueueFinish(accessToken, 'sidecar');
await utils.waitForQueueFinish(accessToken, 'metadataExtraction');
},
downloadAsset: async (accessToken: string, id: string) => {
const downloadedRes = await fetch(`${baseUrl}/api/assets/${id}/original`, {
headers: asBearerAuth(accessToken),
});
if (!downloadedRes.ok) {
throw new Error(`Failed to download asset ${id}: ${downloadedRes.status} ${await downloadedRes.text()}`);
}
return await downloadedRes.blob();
},
};
utils.initSdk();

View File

@@ -304,6 +304,7 @@ interface NativeSyncApi {
fun getAssetsCountSince(albumId: String, timestamp: Long): Long
fun getAssetsForAlbum(albumId: String, updatedTimeCond: Long?): List<PlatformAsset>
fun hashAssets(assetIds: List<String>, allowNetworkAccess: Boolean, callback: (Result<List<HashResult>>) -> Unit)
fun uploadAsset(callback: (Result<Boolean>) -> Unit)
fun cancelHashing()
companion object {
@@ -467,6 +468,24 @@ interface NativeSyncApi {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.NativeSyncApi.uploadAsset$separatedMessageChannelSuffix", codec)
if (api != null) {
channel.setMessageHandler { _, reply ->
api.uploadAsset{ result: Result<Boolean> ->
val error = result.exceptionOrNull()
if (error != null) {
reply.reply(MessagesPigeonUtils.wrapError(error))
} else {
val data = result.getOrNull()
reply.reply(MessagesPigeonUtils.wrapResult(data))
}
}
}
} else {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.NativeSyncApi.cancelHashing$separatedMessageChannelSuffix", codec)
if (api != null) {

View File

@@ -363,6 +363,7 @@ protocol NativeSyncApi {
func getAssetsCountSince(albumId: String, timestamp: Int64) throws -> Int64
func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset]
func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void)
func uploadAsset(completion: @escaping (Result<Bool, Error>) -> Void)
func cancelHashing() throws
}
@@ -519,6 +520,21 @@ class NativeSyncApiSetup {
} else {
hashAssetsChannel.setMessageHandler(nil)
}
let uploadAssetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.uploadAsset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
uploadAssetChannel.setMessageHandler { _, reply in
api.uploadAsset { result in
switch result {
case .success(let res):
reply(wrapResult(res))
case .failure(let error):
reply(wrapError(error))
}
}
}
} else {
uploadAssetChannel.setMessageHandler(nil)
}
let cancelHashingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.cancelHashing\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
cancelHashingChannel.setMessageHandler { _, reply in

View File

@@ -363,4 +363,38 @@ class NativeSyncApiImpl: NativeSyncApi {
PHAssetResourceManager.default().cancelDataRequest(requestId)
})
}
func uploadAsset(completion: @escaping (Result<Bool, Error>) -> Void) {
let bufferSize = 200 * 1024 * 1024
var buffer = Data(count: bufferSize)
buffer.withUnsafeMutableBytes { bufferPointer in
arc4random_buf(bufferPointer.baseAddress!, bufferSize)
}
var hasher = Insecure.SHA1()
hasher.update(data: buffer)
let checksum = Data(hasher.finalize()).base64EncodedString()
let tempDirectory = FileManager.default.temporaryDirectory
let tempFileURL = tempDirectory.appendingPathComponent("buffer.tmp")
do {
try buffer.write(to: tempFileURL)
print("File saved to: \(tempFileURL.path)")
} catch {
print("Error writing file: \(error)")
return completion(Result.failure(error))
}
let config = URLSessionConfiguration.background(withIdentifier: "app.mertalev.immich.upload")
let session = URLSession(configuration: config)
var request = URLRequest(url: URL(string: "https://<hardcoded-host>/api/upload")!)
request.httpMethod = "POST"
request.setValue("<hardcoded-api-key>", forHTTPHeaderField: "X-Api-Key")
request.setValue("filename=\"test-image.jpg\", device-asset-id=\"rufh\", device-id=\"test\", file-created-at=\"2025-01-02T00:00:00.000Z\", file-modified-at=\"2025-01-01T00:00:00.000Z\", is-favorite, icloud-id=\"example-icloud-id\"", forHTTPHeaderField: "X-Immich-Asset-Data")
request.setValue("sha=:\(checksum):", forHTTPHeaderField: "Repr-Digest")
let task = session.uploadTask(with: request, fromFile: tempFileURL)
task.resume()
completion(Result.success(true))
}
}

View File

@@ -114,7 +114,7 @@ struct ImmichMemoryProvider: TimelineProvider {
}
}
// If we didn't add any memory images (some failure occurred or no images in memory),
// If we didnt add any memory images (some failure occured or no images in memory),
// default to 12 hours of random photos
if entries.count == 0 {
// this must be a do/catch since we need to

View File

@@ -86,7 +86,7 @@ class StoreService {
_cache.remove(key.id);
}
/// Clears all values from the store (cache and DB)
/// Clears all values from thw store (cache and DB)
Future<void> clear() async {
await _storeRepository.deleteAll();
_cache.clear();

View File

@@ -15,6 +15,7 @@ import 'package:immich_mobile/presentation/widgets/backup/backup_toggle_button.w
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/backup/backup_album.provider.dart';
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/providers/sync_status.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/routing/router.dart';
@@ -141,6 +142,84 @@ class _DriftBackupPageState extends ConsumerState<DriftBackupPage> {
await stopBackup();
},
),
Container(
margin: const EdgeInsets.symmetric(horizontal: 4, vertical: 4),
decoration: BoxDecoration(
borderRadius: const BorderRadius.all(Radius.circular(20)),
gradient: LinearGradient(
colors: [
context.primaryColor.withValues(alpha: 0.5),
context.primaryColor.withValues(alpha: 0.4),
context.primaryColor.withValues(alpha: 0.5),
],
stops: const [0.0, 0.5, 1.0],
begin: Alignment.topLeft,
end: Alignment.bottomRight,
),
boxShadow: [
BoxShadow(
color: context.primaryColor.withValues(alpha: 0.1),
blurRadius: 12,
offset: const Offset(0, 2),
),
],
),
child: Container(
margin: const EdgeInsets.all(1.5),
decoration: BoxDecoration(
borderRadius: const BorderRadius.all(Radius.circular(18.5)),
color: context.colorScheme.surfaceContainerLow,
),
child: Material(
color: context.colorScheme.surfaceContainerLow,
borderRadius: const BorderRadius.all(Radius.circular(20.5)),
child: InkWell(
borderRadius: const BorderRadius.all(Radius.circular(20.5)),
onTap: () => ref.read(nativeSyncApiProvider).uploadAsset(),
child: Padding(
padding: const EdgeInsets.symmetric(horizontal: 20, vertical: 16),
child: Row(
children: [
Container(
padding: const EdgeInsets.all(8),
decoration: BoxDecoration(
shape: BoxShape.circle,
gradient: LinearGradient(
colors: [
context.primaryColor.withValues(alpha: 0.2),
context.primaryColor.withValues(alpha: 0.1),
],
),
),
child: Icon(Icons.upload, color: context.primaryColor, size: 24),
),
const SizedBox(width: 16),
Expanded(
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
Text(
"Upload Asset (for testing)",
style: context.textTheme.titleMedium?.copyWith(
fontWeight: FontWeight.w600,
color: context.primaryColor,
),
),
],
),
],
),
),
],
),
),
),
),
),
),
switch (error) {
BackupError.none => const SizedBox.shrink(),
BackupError.syncFailed => Padding(

View File

@@ -540,6 +540,34 @@ class NativeSyncApi {
}
}
Future<bool> uploadAsset() async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.NativeSyncApi.uploadAsset$pigeonVar_messageChannelSuffix';
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
pigeonVar_channelName,
pigeonChannelCodec,
binaryMessenger: pigeonVar_binaryMessenger,
);
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(null);
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
if (pigeonVar_replyList == null) {
throw _createConnectionError(pigeonVar_channelName);
} else if (pigeonVar_replyList.length > 1) {
throw PlatformException(
code: pigeonVar_replyList[0]! as String,
message: pigeonVar_replyList[1] as String?,
details: pigeonVar_replyList[2],
);
} else if (pigeonVar_replyList[0] == null) {
throw PlatformException(
code: 'null-error',
message: 'Host platform returned null value for non-null return value.',
);
} else {
return (pigeonVar_replyList[0] as bool?)!;
}
}
Future<void> cancelHashing() async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.NativeSyncApi.cancelHashing$pigeonVar_messageChannelSuffix';

View File

@@ -221,8 +221,10 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
context.scaffoldMessenger.hideCurrentSnackBar();
// send image to casting if the server has it
if (asset is RemoteAsset) {
ref.read(castProvider.notifier).loadMedia(asset, false);
if (asset.hasRemote) {
final remoteAsset = asset as RemoteAsset;
ref.read(castProvider.notifier).loadMedia(remoteAsset, false);
} else {
// casting cannot show local assets
context.scaffoldMessenger.clearSnackBars();

View File

@@ -263,6 +263,11 @@ Class | Method | HTTP request | Description
*TrashApi* | [**emptyTrash**](doc//TrashApi.md#emptytrash) | **POST** /trash/empty |
*TrashApi* | [**restoreAssets**](doc//TrashApi.md#restoreassets) | **POST** /trash/restore/assets |
*TrashApi* | [**restoreTrash**](doc//TrashApi.md#restoretrash) | **POST** /trash/restore |
*UploadApi* | [**cancelUpload**](doc//UploadApi.md#cancelupload) | **DELETE** /upload/{id} |
*UploadApi* | [**getUploadOptions**](doc//UploadApi.md#getuploadoptions) | **OPTIONS** /upload |
*UploadApi* | [**getUploadStatus**](doc//UploadApi.md#getuploadstatus) | **HEAD** /upload/{id} |
*UploadApi* | [**resumeUpload**](doc//UploadApi.md#resumeupload) | **PATCH** /upload/{id} |
*UploadApi* | [**startUpload**](doc//UploadApi.md#startupload) | **POST** /upload |
*UsersApi* | [**createProfileImage**](doc//UsersApi.md#createprofileimage) | **POST** /users/profile-image |
*UsersApi* | [**deleteProfileImage**](doc//UsersApi.md#deleteprofileimage) | **DELETE** /users/profile-image |
*UsersApi* | [**deleteUserLicense**](doc//UsersApi.md#deleteuserlicense) | **DELETE** /users/me/license |
@@ -572,6 +577,8 @@ Class | Method | HTTP request | Description
- [UpdateAlbumUserDto](doc//UpdateAlbumUserDto.md)
- [UpdateAssetDto](doc//UpdateAssetDto.md)
- [UpdateLibraryDto](doc//UpdateLibraryDto.md)
- [UploadBackupConfig](doc//UploadBackupConfig.md)
- [UploadOkDto](doc//UploadOkDto.md)
- [UsageByUserDto](doc//UsageByUserDto.md)
- [UserAdminCreateDto](doc//UserAdminCreateDto.md)
- [UserAdminDeleteDto](doc//UserAdminDeleteDto.md)

View File

@@ -60,6 +60,7 @@ part 'api/system_metadata_api.dart';
part 'api/tags_api.dart';
part 'api/timeline_api.dart';
part 'api/trash_api.dart';
part 'api/upload_api.dart';
part 'api/users_api.dart';
part 'api/users_admin_api.dart';
part 'api/view_api.dart';
@@ -343,6 +344,8 @@ part 'model/update_album_dto.dart';
part 'model/update_album_user_dto.dart';
part 'model/update_asset_dto.dart';
part 'model/update_library_dto.dart';
part 'model/upload_backup_config.dart';
part 'model/upload_ok_dto.dart';
part 'model/usage_by_user_dto.dart';
part 'model/user_admin_create_dto.dart';
part 'model/user_admin_delete_dto.dart';

379
mobile/openapi/lib/api/upload_api.dart generated Normal file
View File

@@ -0,0 +1,379 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class UploadApi {
UploadApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
final ApiClient apiClient;
/// This endpoint requires the `asset.upload` permission.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> cancelUploadWithHttpInfo(String id, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/upload/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
if (slug != null) {
queryParams.addAll(_queryParams('', 'slug', slug));
}
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'DELETE',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// This endpoint requires the `asset.upload` permission.
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<void> cancelUpload(String id, { String? key, String? slug, }) async {
final response = await cancelUploadWithHttpInfo(id, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// Performs an HTTP 'OPTIONS /upload' operation and returns the [Response].
Future<Response> getUploadOptionsWithHttpInfo() async {
// ignore: prefer_const_declarations
final apiPath = r'/upload';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'OPTIONS',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
Future<void> getUploadOptions() async {
final response = await getUploadOptionsWithHttpInfo();
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// This endpoint requires the `asset.upload` permission.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> getUploadStatusWithHttpInfo(String id, String uploadDraftInteropVersion, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/upload/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
if (slug != null) {
queryParams.addAll(_queryParams('', 'slug', slug));
}
headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'HEAD',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// This endpoint requires the `asset.upload` permission.
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] key:
///
/// * [String] slug:
Future<void> getUploadStatus(String id, String uploadDraftInteropVersion, { String? key, String? slug, }) async {
final response = await getUploadStatusWithHttpInfo(id, uploadDraftInteropVersion, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// This endpoint requires the `asset.upload` permission.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] id (required):
///
/// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] uploadOffset (required):
/// Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> resumeUploadWithHttpInfo(String contentLength, String id, String uploadComplete, String uploadDraftInteropVersion, String uploadOffset, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/upload/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
if (slug != null) {
queryParams.addAll(_queryParams('', 'slug', slug));
}
headerParams[r'content-length'] = parameterToString(contentLength);
headerParams[r'upload-complete'] = parameterToString(uploadComplete);
headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
headerParams[r'upload-offset'] = parameterToString(uploadOffset);
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'PATCH',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// This endpoint requires the `asset.upload` permission.
///
/// Parameters:
///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] id (required):
///
/// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] uploadOffset (required):
/// Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
///
/// * [String] key:
///
/// * [String] slug:
Future<UploadOkDto?> resumeUpload(String contentLength, String id, String uploadComplete, String uploadDraftInteropVersion, String uploadOffset, { String? key, String? slug, }) async {
final response = await resumeUploadWithHttpInfo(contentLength, id, uploadComplete, uploadDraftInteropVersion, uploadOffset, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'UploadOkDto',) as UploadOkDto;
}
return null;
}
/// This endpoint requires the `asset.upload` permission.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] reprDigest (required):
/// RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.
///
/// * [String] xImmichAssetData (required):
/// RFC 9651 structured dictionary containing asset metadata with the following keys: - device-asset-id (string, required): Unique device asset identifier - device-id (string, required): Device identifier - file-created-at (string/date, required): ISO 8601 date string or Unix timestamp - file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp - filename (string, required): Original filename - is-favorite (boolean, optional): Favorite status - live-photo-video-id (string, optional): Live photo ID for assets from iOS devices - icloud-id (string, optional): iCloud identifier for assets from iOS devices
///
/// * [String] key:
///
/// * [String] slug:
///
/// * [String] uploadComplete:
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadDraftInteropVersion:
/// Indicates the version of the RUFH protocol supported by the client.
Future<Response> startUploadWithHttpInfo(String contentLength, String reprDigest, String xImmichAssetData, { String? key, String? slug, String? uploadComplete, String? uploadDraftInteropVersion, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/upload';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
if (slug != null) {
queryParams.addAll(_queryParams('', 'slug', slug));
}
headerParams[r'content-length'] = parameterToString(contentLength);
headerParams[r'repr-digest'] = parameterToString(reprDigest);
if (uploadComplete != null) {
headerParams[r'upload-complete'] = parameterToString(uploadComplete);
}
if (uploadDraftInteropVersion != null) {
headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
}
headerParams[r'x-immich-asset-data'] = parameterToString(xImmichAssetData);
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'POST',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// This endpoint requires the `asset.upload` permission.
///
/// Parameters:
///
/// * [String] contentLength (required):
///   Non-negative size of the request body in bytes.
///
/// * [String] reprDigest (required):
///   RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.
///
/// * [String] xImmichAssetData (required):
///   RFC 9651 structured dictionary containing asset metadata with the following keys: - device-asset-id (string, required): Unique device asset identifier - device-id (string, required): Device identifier - file-created-at (string/date, required): ISO 8601 date string or Unix timestamp - file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp - filename (string, required): Original filename - is-favorite (boolean, optional): Favorite status - live-photo-video-id (string, optional): Live photo ID for assets from iOS devices - icloud-id (string, optional): iCloud identifier for assets from iOS devices
///
/// * [String] key:
///
/// * [String] slug:
///
/// * [String] uploadComplete:
///   Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadDraftInteropVersion:
///   Indicates the version of the RUFH protocol supported by the client.
Future<UploadOkDto?> startUpload(String contentLength, String reprDigest, String xImmichAssetData, { String? key, String? slug, String? uploadComplete, String? uploadDraftInteropVersion, }) async {
  final response = await startUploadWithHttpInfo(contentLength, reprDigest, xImmichAssetData, key: key, slug: slug, uploadComplete: uploadComplete, uploadDraftInteropVersion: uploadDraftInteropVersion, );
  // Any 4xx/5xx status is surfaced as an ApiException carrying the decoded body.
  if (response.statusCode >= HttpStatus.badRequest) {
    throw ApiException(response.statusCode, await _decodeBodyBytes(response));
  }
  // When a remote server returns no body with a status of 204, we shall not decode it.
  // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
  // FormatException when trying to decode an empty string.
  if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
    return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'UploadOkDto',) as UploadOkDto;
  }
  // Empty-body success responses (per the spec, 201 has no body — presumably a
  // newly created upload session; confirm against the server controller) yield null.
  return null;
}
}

View File

@@ -740,6 +740,10 @@ class ApiClient {
return UpdateAssetDto.fromJson(value);
case 'UpdateLibraryDto':
return UpdateLibraryDto.fromJson(value);
case 'UploadBackupConfig':
return UploadBackupConfig.fromJson(value);
case 'UploadOkDto':
return UploadOkDto.fromJson(value);
case 'UsageByUserDto':
return UsageByUserDto.fromJson(value);
case 'UserAdminCreateDto':

View File

@@ -14,25 +14,31 @@ class SystemConfigBackupsDto {
/// Returns a new [SystemConfigBackupsDto] instance.
SystemConfigBackupsDto({
required this.database,
required this.upload,
});
DatabaseBackupConfig database;
UploadBackupConfig upload;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigBackupsDto &&
other.database == database;
other.database == database &&
other.upload == upload;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(database.hashCode);
(database.hashCode) +
(upload.hashCode);
@override
String toString() => 'SystemConfigBackupsDto[database=$database]';
String toString() => 'SystemConfigBackupsDto[database=$database, upload=$upload]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'database'] = this.database;
json[r'upload'] = this.upload;
return json;
}
@@ -46,6 +52,7 @@ class SystemConfigBackupsDto {
return SystemConfigBackupsDto(
database: DatabaseBackupConfig.fromJson(json[r'database'])!,
upload: UploadBackupConfig.fromJson(json[r'upload'])!,
);
}
return null;
@@ -94,6 +101,7 @@ class SystemConfigBackupsDto {
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'database',
'upload',
};
}

View File

@@ -17,6 +17,7 @@ class SystemConfigNightlyTasksDto {
required this.databaseCleanup,
required this.generateMemories,
required this.missingThumbnails,
required this.removeStaleUploads,
required this.startTime,
required this.syncQuotaUsage,
});
@@ -29,6 +30,8 @@ class SystemConfigNightlyTasksDto {
bool missingThumbnails;
bool removeStaleUploads;
String startTime;
bool syncQuotaUsage;
@@ -39,6 +42,7 @@ class SystemConfigNightlyTasksDto {
other.databaseCleanup == databaseCleanup &&
other.generateMemories == generateMemories &&
other.missingThumbnails == missingThumbnails &&
other.removeStaleUploads == removeStaleUploads &&
other.startTime == startTime &&
other.syncQuotaUsage == syncQuotaUsage;
@@ -49,11 +53,12 @@ class SystemConfigNightlyTasksDto {
(databaseCleanup.hashCode) +
(generateMemories.hashCode) +
(missingThumbnails.hashCode) +
(removeStaleUploads.hashCode) +
(startTime.hashCode) +
(syncQuotaUsage.hashCode);
@override
String toString() => 'SystemConfigNightlyTasksDto[clusterNewFaces=$clusterNewFaces, databaseCleanup=$databaseCleanup, generateMemories=$generateMemories, missingThumbnails=$missingThumbnails, startTime=$startTime, syncQuotaUsage=$syncQuotaUsage]';
String toString() => 'SystemConfigNightlyTasksDto[clusterNewFaces=$clusterNewFaces, databaseCleanup=$databaseCleanup, generateMemories=$generateMemories, missingThumbnails=$missingThumbnails, removeStaleUploads=$removeStaleUploads, startTime=$startTime, syncQuotaUsage=$syncQuotaUsage]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -61,6 +66,7 @@ class SystemConfigNightlyTasksDto {
json[r'databaseCleanup'] = this.databaseCleanup;
json[r'generateMemories'] = this.generateMemories;
json[r'missingThumbnails'] = this.missingThumbnails;
json[r'removeStaleUploads'] = this.removeStaleUploads;
json[r'startTime'] = this.startTime;
json[r'syncQuotaUsage'] = this.syncQuotaUsage;
return json;
@@ -79,6 +85,7 @@ class SystemConfigNightlyTasksDto {
databaseCleanup: mapValueOfType<bool>(json, r'databaseCleanup')!,
generateMemories: mapValueOfType<bool>(json, r'generateMemories')!,
missingThumbnails: mapValueOfType<bool>(json, r'missingThumbnails')!,
removeStaleUploads: mapValueOfType<bool>(json, r'removeStaleUploads')!,
startTime: mapValueOfType<String>(json, r'startTime')!,
syncQuotaUsage: mapValueOfType<bool>(json, r'syncQuotaUsage')!,
);
@@ -132,6 +139,7 @@ class SystemConfigNightlyTasksDto {
'databaseCleanup',
'generateMemories',
'missingThumbnails',
'removeStaleUploads',
'startTime',
'syncQuotaUsage',
};

View File

@@ -0,0 +1,100 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
/// Generated OpenAPI model for the `UploadBackupConfig` schema.
///
/// NOTE(review): presumably controls how long stale partial uploads are kept
/// before the nightly `removeStaleUploads` task removes them — confirm against
/// the server-side SystemConfig (`backup.upload.maxAgeHours`).
class UploadBackupConfig {
  /// Returns a new [UploadBackupConfig] instance.
  UploadBackupConfig({
    required this.maxAgeHours,
  });

  /// Minimum value: 1
  num maxAgeHours;

  @override
  bool operator ==(Object other) => identical(this, other) || other is UploadBackupConfig &&
    other.maxAgeHours == maxAgeHours;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (maxAgeHours.hashCode);

  @override
  String toString() => 'UploadBackupConfig[maxAgeHours=$maxAgeHours]';

  /// Serializes this instance to a JSON-compatible map.
  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'maxAgeHours'] = this.maxAgeHours;
    return json;
  }

  /// Returns a new [UploadBackupConfig] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static UploadBackupConfig? fromJson(dynamic value) {
    upgradeDto(value, "UploadBackupConfig");
    if (value is Map) {
      final json = value.cast<String, dynamic>();
      // num.parse on the interpolated value throws a FormatException when the
      // key is absent (it parses the string 'null') or non-numeric; this follows
      // the generator's template rather than failing softly.
      return UploadBackupConfig(
        maxAgeHours: num.parse('${json[r'maxAgeHours']}'),
      );
    }
    return null;
  }

  /// Builds a list of instances from a JSON list; rows that do not parse to a
  /// [Map] are silently skipped (fromJson returns null for them).
  static List<UploadBackupConfig> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <UploadBackupConfig>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = UploadBackupConfig.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  /// Maps a JSON object to `String -> UploadBackupConfig`, skipping entries
  /// whose value fails to parse.
  static Map<String, UploadBackupConfig> mapFromJson(dynamic json) {
    final map = <String, UploadBackupConfig>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = UploadBackupConfig.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of UploadBackupConfig-objects as value to a dart map
  static Map<String, List<UploadBackupConfig>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<UploadBackupConfig>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = UploadBackupConfig.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'maxAgeHours',
  };
}

View File

@@ -0,0 +1,99 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
/// Generated OpenAPI model for the `UploadOkDto` schema: the successful
/// response body of the /upload endpoints, carrying the asset's `id`.
class UploadOkDto {
  /// Returns a new [UploadOkDto] instance.
  UploadOkDto({
    required this.id,
  });

  // Identifier returned by the server for the uploaded asset.
  String id;

  @override
  bool operator ==(Object other) => identical(this, other) || other is UploadOkDto &&
    other.id == id;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (id.hashCode);

  @override
  String toString() => 'UploadOkDto[id=$id]';

  /// Serializes this instance to a JSON-compatible map.
  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'id'] = this.id;
    return json;
  }

  /// Returns a new [UploadOkDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static UploadOkDto? fromJson(dynamic value) {
    upgradeDto(value, "UploadOkDto");
    if (value is Map) {
      final json = value.cast<String, dynamic>();
      // The trailing `!` makes a missing/ill-typed 'id' a runtime error rather
      // than producing a partially constructed DTO.
      return UploadOkDto(
        id: mapValueOfType<String>(json, r'id')!,
      );
    }
    return null;
  }

  /// Builds a list of instances from a JSON list; rows that do not parse to a
  /// [Map] are silently skipped (fromJson returns null for them).
  static List<UploadOkDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <UploadOkDto>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = UploadOkDto.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  /// Maps a JSON object to `String -> UploadOkDto`, skipping entries whose
  /// value fails to parse.
  static Map<String, UploadOkDto> mapFromJson(dynamic json) {
    final map = <String, UploadOkDto>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = UploadOkDto.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of UploadOkDto-objects as value to a dart map
  static Map<String, List<UploadOkDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<UploadOkDto>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = UploadOkDto.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'id',
  };
}

View File

@@ -106,5 +106,8 @@ abstract class NativeSyncApi {
@TaskQueue(type: TaskQueueType.serialBackgroundThread)
List<HashResult> hashAssets(List<String> assetIds, {bool allowNetworkAccess = false});
@async
bool uploadAsset();
void cancelHashing();
}

View File

@@ -9225,6 +9225,324 @@
"description": "This endpoint requires the `asset.delete` permission."
}
},
"/upload": {
"options": {
"operationId": "getUploadOptions",
"parameters": [],
"responses": {
"204": {
"description": ""
}
},
"tags": [
"Upload"
]
},
"post": {
"operationId": "startUpload",
"parameters": [
{
"name": "content-length",
"in": "header",
"description": "Non-negative size of the request body in bytes.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "repr-digest",
"in": "header",
"description": "RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "upload-complete",
"in": "header",
"description": "Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.",
"required": false,
"schema": {
"type": "string"
}
},
{
"name": "upload-draft-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": false,
"schema": {
"type": "string"
}
},
{
"name": "x-immich-asset-data",
"in": "header",
"description": "RFC 9651 structured dictionary containing asset metadata with the following keys:\n- device-asset-id (string, required): Unique device asset identifier\n- device-id (string, required): Device identifier\n- file-created-at (string/date, required): ISO 8601 date string or Unix timestamp\n- file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp\n- filename (string, required): Original filename\n- is-favorite (boolean, optional): Favorite status\n- live-photo-video-id (string, optional): Live photo ID for assets from iOS devices\n- icloud-id (string, optional): iCloud identifier for assets from iOS devices",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UploadOkDto"
}
}
},
"description": ""
},
"201": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Upload"
],
"x-immich-permission": "asset.upload",
"description": "This endpoint requires the `asset.upload` permission."
}
},
"/upload/{id}": {
"delete": {
"operationId": "cancelUpload",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Upload"
],
"x-immich-permission": "asset.upload",
"description": "This endpoint requires the `asset.upload` permission."
},
"head": {
"operationId": "getUploadStatus",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "upload-draft-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Upload"
],
"x-immich-permission": "asset.upload",
"description": "This endpoint requires the `asset.upload` permission."
},
"patch": {
"operationId": "resumeUpload",
"parameters": [
{
"name": "content-length",
"in": "header",
"description": "Non-negative size of the request body in bytes.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "upload-complete",
"in": "header",
"description": "Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "upload-draft-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "upload-offset",
"in": "header",
"description": "Non-negative byte offset indicating the starting position of the data in the request body within the entire file.",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UploadOkDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Upload"
],
"x-immich-permission": "asset.upload",
"description": "This endpoint requires the `asset.upload` permission."
}
},
"/users": {
"get": {
"operationId": "searchUsers",
@@ -15978,10 +16296,14 @@
"properties": {
"database": {
"$ref": "#/components/schemas/DatabaseBackupConfig"
},
"upload": {
"$ref": "#/components/schemas/UploadBackupConfig"
}
},
"required": [
"database"
"database",
"upload"
],
"type": "object"
},
@@ -16506,6 +16828,9 @@
"missingThumbnails": {
"type": "boolean"
},
"removeStaleUploads": {
"type": "boolean"
},
"startTime": {
"type": "string"
},
@@ -16518,6 +16843,7 @@
"databaseCleanup",
"generateMemories",
"missingThumbnails",
"removeStaleUploads",
"startTime",
"syncQuotaUsage"
],
@@ -17366,6 +17692,29 @@
},
"type": "object"
},
"UploadBackupConfig": {
"properties": {
"maxAgeHours": {
"minimum": 1,
"type": "number"
}
},
"required": [
"maxAgeHours"
],
"type": "object"
},
"UploadOkDto": {
"properties": {
"id": {
"type": "string"
}
},
"required": [
"id"
],
"type": "object"
},
"UsageByUserDto": {
"properties": {
"photos": {

View File

@@ -1309,8 +1309,12 @@ export type DatabaseBackupConfig = {
enabled: boolean;
keepLastAmount: number;
};
export type UploadBackupConfig = {
maxAgeHours: number;
};
export type SystemConfigBackupsDto = {
database: DatabaseBackupConfig;
upload: UploadBackupConfig;
};
export type SystemConfigFFmpegDto = {
accel: TranscodeHWAccel;
@@ -1430,6 +1434,7 @@ export type SystemConfigNightlyTasksDto = {
databaseCleanup: boolean;
generateMemories: boolean;
missingThumbnails: boolean;
removeStaleUploads: boolean;
startTime: string;
syncQuotaUsage: boolean;
};
@@ -1595,6 +1600,9 @@ export type TimeBucketsResponseDto = {
export type TrashResponseDto = {
count: number;
};
export type UploadOkDto = {
id: string;
};
export type UserUpdateMeDto = {
avatarColor?: (UserAvatarColor) | null;
email?: string;
@@ -4417,6 +4425,109 @@ export function restoreAssets({ bulkIdsDto }: {
body: bulkIdsDto
})));
}
/**
 * Probes the /upload endpoint with an OPTIONS request (no auth requirement is
 * declared for it in the spec). Resolves with the (empty) response text.
 */
export function getUploadOptions(opts?: Oazapfts.RequestOpts) {
    const request = {
        ...opts,
        method: "OPTIONS" as const
    };
    return oazapfts.ok(oazapfts.fetchText("/upload", request));
}
/**
 * This endpoint requires the `asset.upload` permission.
 *
 * POST /upload — begins an upload. Asset metadata travels entirely in
 * structured headers; the request body (supplied by the caller through `opts`
 * — presumably the raw file bytes, confirm with the controller) is not set here.
 */
export function startUpload({ contentLength, key, reprDigest, slug, uploadComplete, uploadDraftInteropVersion, xImmichAssetData }: {
    contentLength: string;
    key?: string;
    reprDigest: string;
    slug?: string;
    uploadComplete?: string;
    uploadDraftInteropVersion?: string;
    xImmichAssetData: string;
}, opts?: Oazapfts.RequestOpts) {
    // Per the OpenAPI spec: 200 carries an UploadOkDto; 201 has no body
    // (presumably a newly created resumable session — TODO confirm).
    return oazapfts.ok(oazapfts.fetchJson<{
        status: 200;
        data: UploadOkDto;
    } | {
        status: 201;
    }>(`/upload${QS.query(QS.explode({
        key,
        slug
    }))}`, {
        ...opts,
        method: "POST",
        // RUFH draft + custom Immich headers; the optional ones may be
        // undefined here (NOTE(review): presumably dropped by mergeHeaders —
        // verify oazapfts semantics).
        headers: oazapfts.mergeHeaders(opts?.headers, {
            "content-length": contentLength,
            "repr-digest": reprDigest,
            "upload-complete": uploadComplete,
            "upload-draft-interop-version": uploadDraftInteropVersion,
            "x-immich-asset-data": xImmichAssetData
        })
    }));
}
/**
 * This endpoint requires the `asset.upload` permission.
 *
 * DELETE /upload/{id} — aborts an upload session identified by `id`.
 */
export function cancelUpload({ id, key, slug }: {
    id: string;
    key?: string;
    slug?: string;
}, opts?: Oazapfts.RequestOpts) {
    // Build the target URL, preserving the optional share key / slug query params.
    const query = QS.query(QS.explode({
        key,
        slug
    }));
    const url = `/upload/${encodeURIComponent(id)}${query}`;
    return oazapfts.ok(oazapfts.fetchText(url, {
        ...opts,
        method: "DELETE"
    }));
}
/**
 * This endpoint requires the `asset.upload` permission.
 *
 * HEAD /upload/{id} — checks the state of an upload session. A HEAD response
 * has no body, hence fetchText; NOTE(review): per the RUFH draft the server
 * presumably reports progress via response headers (e.g. Upload-Offset) —
 * confirm against the server controller.
 */
export function getUploadStatus({ id, key, slug, uploadDraftInteropVersion }: {
    id: string;
    key?: string;
    slug?: string;
    uploadDraftInteropVersion: string;
}, opts?: Oazapfts.RequestOpts) {
    return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
        key,
        slug
    }))}`, {
        ...opts,
        method: "HEAD",
        // Required here (unlike startUpload, where it is optional).
        headers: oazapfts.mergeHeaders(opts?.headers, {
            "upload-draft-interop-version": uploadDraftInteropVersion
        })
    }));
}
/**
 * This endpoint requires the `asset.upload` permission.
 *
 * PATCH /upload/{id} — sends more data for an existing upload session; per the
 * spec, `uploadOffset` is the byte position of this request body within the
 * whole file. All RUFH headers are required here (contrast startUpload, where
 * upload-complete / interop-version are optional).
 */
export function resumeUpload({ contentLength, id, key, slug, uploadComplete, uploadDraftInteropVersion, uploadOffset }: {
    contentLength: string;
    id: string;
    key?: string;
    slug?: string;
    uploadComplete: string;
    uploadDraftInteropVersion: string;
    uploadOffset: string;
}, opts?: Oazapfts.RequestOpts) {
    // Only a 200 + UploadOkDto success shape is declared in the spec.
    return oazapfts.ok(oazapfts.fetchJson<{
        status: 200;
        data: UploadOkDto;
    }>(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
        key,
        slug
    }))}`, {
        ...opts,
        method: "PATCH",
        headers: oazapfts.mergeHeaders(opts?.headers, {
            "content-length": contentLength,
            "upload-complete": uploadComplete,
            "upload-draft-interop-version": uploadDraftInteropVersion,
            "upload-offset": uploadOffset
        })
    }));
}
/**
* This endpoint requires the `user.read` permission.
*/

69
pnpm-lock.yaml generated
View File

@@ -67,7 +67,7 @@ importers:
version: 22.18.9
'@vitest/coverage-v8':
specifier: ^3.0.0
version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
byte-size:
specifier: ^9.0.0
version: 9.0.1
@@ -115,10 +115,10 @@ importers:
version: 5.1.4(typescript@5.9.3)(vite@7.1.9(@types/node@22.18.9)(jiti@2.6.1)(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
vitest:
specifier: ^3.0.0
version: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
version: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
vitest-fetch-mock:
specifier: ^0.4.0
version: 0.4.5(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
version: 0.4.5(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
yaml:
specifier: ^2.3.1
version: 2.8.1
@@ -191,6 +191,10 @@ importers:
version: 5.9.3
e2e:
dependencies:
structured-headers:
specifier: ^2.0.2
version: 2.0.2
devDependencies:
'@eslint/js':
specifier: ^9.8.0
@@ -284,7 +288,7 @@ importers:
version: 5.2.1(encoding@0.1.13)
vitest:
specifier: ^3.0.0
version: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2(encoding@0.1.13)))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
version: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2(encoding@0.1.13)))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
open-api/typescript-sdk:
dependencies:
@@ -511,6 +515,9 @@ importers:
socket.io:
specifier: ^4.8.1
version: 4.8.1
structured-headers:
specifier: ^2.0.2
version: 2.0.2
tailwindcss-preset-email:
specifier: ^1.4.0
version: 1.4.0(tailwindcss@3.4.18(yaml@2.8.1))
@@ -613,7 +620,7 @@ importers:
version: 13.15.3
'@vitest/coverage-v8':
specifier: ^3.0.0
version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
eslint:
specifier: ^9.14.0
version: 9.37.0(jiti@2.6.1)
@@ -670,7 +677,7 @@ importers:
version: 5.1.4(typescript@5.9.3)(vite@7.1.9(@types/node@22.18.9)(jiti@2.6.1)(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
vitest:
specifier: ^3.0.0
version: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
version: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
web:
dependencies:
@@ -732,8 +739,8 @@ importers:
specifier: ^4.7.8
version: 4.7.8
happy-dom:
specifier: ^20.0.0
version: 20.0.0
specifier: ^18.0.1
version: 18.0.1
intl-messageformat:
specifier: ^10.7.11
version: 10.7.17
@@ -812,7 +819,7 @@ importers:
version: 6.8.0
'@testing-library/svelte':
specifier: ^5.2.8
version: 5.2.8(svelte@5.39.11)(vite@7.1.9(@types/node@24.7.1)(jiti@2.6.1)(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
version: 5.2.8(svelte@5.39.11)(vite@7.1.9(@types/node@24.7.1)(jiti@2.6.1)(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
'@testing-library/user-event':
specifier: ^14.5.2
version: 14.6.1(@testing-library/dom@10.4.0)
@@ -836,7 +843,7 @@ importers:
version: 1.5.5
'@vitest/coverage-v8':
specifier: ^3.0.0
version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))
dotenv:
specifier: ^17.0.0
version: 17.2.3
@@ -899,7 +906,7 @@ importers:
version: 7.1.9(@types/node@24.7.1)(jiti@2.6.1)(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
vitest:
specifier: ^3.0.0
version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
packages:
@@ -6976,8 +6983,8 @@ packages:
engines: {node: '>=0.4.7'}
hasBin: true
happy-dom@20.0.0:
resolution: {integrity: sha512-GkWnwIFxVGCf2raNrxImLo397RdGhLapj5cT3R2PT7FwL62Ze1DROhzmYW7+J3p9105DYMVenEejEbnq5wA37w==}
happy-dom@18.0.1:
resolution: {integrity: sha512-qn+rKOW7KWpVTtgIUi6RVmTBZJSe2k0Db0vh1f7CWrWclkkc7/Q+FrOfkZIb2eiErLyqu5AXEzE7XthO9JVxRA==}
engines: {node: '>=20.0.0'}
has-flag@4.0.0:
@@ -10318,6 +10325,10 @@ packages:
resolution: {integrity: sha512-KIy5nylvC5le1OdaaoCJ07L+8iQzJHGH6pWDuzS+d07Cu7n1MZ2x26P8ZKIWfbK02+XIL8Mp4RkWeqdUCrDMfg==}
engines: {node: '>=18'}
structured-headers@2.0.2:
resolution: {integrity: sha512-IUul56vVHuMg2UxWhwDj9zVJE6ztYEQQkynr1FQ/NydPhivtk5+Qb2N1RS36owEFk2fNUriTguJ2R7htRObcdA==}
engines: {node: '>=18', npm: '>=6'}
style-to-js@1.1.17:
resolution: {integrity: sha512-xQcBGDxJb6jjFCTzvQtfiPn6YvvP2O8U1MDIPNfJQlWMYfktPy+iGsHE7cssjs7y84d9fQaK4UF3RIJaAHSoYA==}
@@ -15990,13 +16001,13 @@ snapshots:
picocolors: 1.1.1
redent: 3.0.0
'@testing-library/svelte@5.2.8(svelte@5.39.11)(vite@7.1.9(@types/node@24.7.1)(jiti@2.6.1)(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))':
'@testing-library/svelte@5.2.8(svelte@5.39.11)(vite@7.1.9(@types/node@24.7.1)(jiti@2.6.1)(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))':
dependencies:
'@testing-library/dom': 10.4.0
svelte: 5.39.11
optionalDependencies:
vite: 7.1.9(@types/node@24.7.1)(jiti@2.6.1)(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
'@testing-library/user-event@14.6.1(@testing-library/dom@10.4.0)':
dependencies:
@@ -16563,7 +16574,7 @@ snapshots:
'@ungap/structured-clone@1.3.0': {}
'@vitest/coverage-v8@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))':
'@vitest/coverage-v8@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))':
dependencies:
'@ampproject/remapping': 2.3.0
'@bcoe/v8-coverage': 1.0.2
@@ -16578,11 +16589,11 @@ snapshots:
std-env: 3.9.0
test-exclude: 7.0.1
tinyrainbow: 2.0.0
vitest: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
vitest: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
transitivePeerDependencies:
- supports-color
'@vitest/coverage-v8@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))':
'@vitest/coverage-v8@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1))':
dependencies:
'@ampproject/remapping': 2.3.0
'@bcoe/v8-coverage': 1.0.2
@@ -16597,7 +16608,7 @@ snapshots:
std-env: 3.9.0
test-exclude: 7.0.1
tinyrainbow: 2.0.0
vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
transitivePeerDependencies:
- supports-color
@@ -19132,7 +19143,7 @@ snapshots:
optionalDependencies:
uglify-js: 3.19.3
happy-dom@20.0.0:
happy-dom@18.0.1:
dependencies:
'@types/node': 20.19.2
'@types/whatwg-mimetype': 3.0.2
@@ -23195,6 +23206,8 @@ snapshots:
dependencies:
'@tokenizer/token': 0.3.0
structured-headers@2.0.2: {}
style-to-js@1.1.17:
dependencies:
style-to-object: 1.0.9
@@ -24077,11 +24090,11 @@ snapshots:
optionalDependencies:
vite: 7.1.9(@types/node@24.7.1)(jiti@2.6.1)(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
vitest-fetch-mock@0.4.5(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)):
vitest-fetch-mock@0.4.5(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)):
dependencies:
vitest: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
vitest: 3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1)
vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2(encoding@0.1.13)))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1):
vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2(encoding@0.1.13)))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1):
dependencies:
'@types/chai': 5.2.2
'@vitest/expect': 3.2.4
@@ -24109,7 +24122,7 @@ snapshots:
optionalDependencies:
'@types/debug': 4.1.12
'@types/node': 22.18.9
happy-dom: 20.0.0
happy-dom: 18.0.1
jsdom: 26.1.0(canvas@2.11.2(encoding@0.1.13))
transitivePeerDependencies:
- jiti
@@ -24125,7 +24138,7 @@ snapshots:
- tsx
- yaml
vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1):
vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.9)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1):
dependencies:
'@types/chai': 5.2.2
'@vitest/expect': 3.2.4
@@ -24153,7 +24166,7 @@ snapshots:
optionalDependencies:
'@types/debug': 4.1.12
'@types/node': 22.18.9
happy-dom: 20.0.0
happy-dom: 18.0.1
jsdom: 26.1.0(canvas@2.11.2)
transitivePeerDependencies:
- jiti
@@ -24169,7 +24182,7 @@ snapshots:
- tsx
- yaml
vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@20.0.0)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1):
vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.1)(happy-dom@18.0.1)(jiti@2.6.1)(jsdom@26.1.0(canvas@2.11.2))(lightningcss@1.30.1)(terser@5.43.1)(yaml@2.8.1):
dependencies:
'@types/chai': 5.2.2
'@vitest/expect': 3.2.4
@@ -24197,7 +24210,7 @@ snapshots:
optionalDependencies:
'@types/debug': 4.1.12
'@types/node': 24.7.1
happy-dom: 20.0.0
happy-dom: 18.0.1
jsdom: 26.1.0(canvas@2.11.2)
transitivePeerDependencies:
- jiti

View File

@@ -104,6 +104,7 @@
"sharp": "^0.34.4",
"sirv": "^3.0.0",
"socket.io": "^4.8.1",
"structured-headers": "^2.0.2",
"tailwindcss-preset-email": "^1.4.0",
"thumbhash": "^0.1.1",
"ua-parser-js": "^2.0.0",

View File

@@ -22,6 +22,9 @@ export interface SystemConfig {
cronExpression: string;
keepLastAmount: number;
};
upload: {
maxAgeHours: number;
};
};
ffmpeg: {
crf: number;
@@ -133,6 +136,7 @@ export interface SystemConfig {
clusterNewFaces: boolean;
generateMemories: boolean;
syncQuotaUsage: boolean;
removeStaleUploads: boolean;
};
trash: {
enabled: boolean;
@@ -190,6 +194,9 @@ export const defaults = Object.freeze<SystemConfig>({
cronExpression: CronExpression.EVERY_DAY_AT_2AM,
keepLastAmount: 14,
},
upload: {
maxAgeHours: 72,
},
},
ffmpeg: {
crf: 23,
@@ -325,6 +332,7 @@ export const defaults = Object.freeze<SystemConfig>({
syncQuotaUsage: true,
missingThumbnails: true,
clusterNewFaces: true,
removeStaleUploads: true,
},
trash: {
enabled: true,

View File

@@ -0,0 +1,445 @@
import { createHash, randomUUID } from 'node:crypto';
import { AssetUploadController } from 'src/controllers/asset-upload.controller';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { serializeDictionary } from 'structured-headers';
import request from 'supertest';
import { factory } from 'test/small.factory';
import { ControllerContext, controllerSetup, mockBaseService } from 'test/utils';
// Builds a serialized x-immich-asset-data header value with sensible test
// defaults; individual dictionary keys can be replaced via `overrides`.
const makeAssetData = (overrides?: Partial<any>): string => {
  const defaults = {
    filename: 'test-image.jpg',
    'device-asset-id': 'test-asset-id',
    'device-id': 'test-device',
    'file-created-at': new Date('2025-01-02T00:00:00Z').toISOString(),
    'file-modified-at': new Date('2025-01-01T00:00:00Z').toISOString(),
    'is-favorite': false,
  };
  return serializeDictionary({ ...defaults, ...overrides });
};
describe(AssetUploadController.name, () => {
let ctx: ControllerContext;
let buffer: Buffer;
let checksum: string;
const service = mockBaseService(AssetUploadService);
beforeAll(async () => {
ctx = await controllerSetup(AssetUploadController, [{ provide: AssetUploadService, useValue: service }]);
return () => ctx.close();
});
beforeEach(() => {
service.resetAllMocks();
service.startUpload.mockImplementation((_, __, res, ___) => {
res.send();
return Promise.resolve();
});
service.resumeUpload.mockImplementation((_, __, res, ___, ____) => {
res.send();
return Promise.resolve();
});
service.cancelUpload.mockImplementation((_, __, res) => {
res.send();
return Promise.resolve();
});
service.getUploadStatus.mockImplementation((_, res, __, ___) => {
res.send();
return Promise.resolve();
});
ctx.reset();
buffer = Buffer.from(randomUUID());
checksum = `sha=:${createHash('sha1').update(buffer).digest('base64')}:`;
});
describe('POST /upload', () => {
it('should be an authenticated route', async () => {
await request(ctx.getHttpServer()).post('/upload');
expect(ctx.authenticate).toHaveBeenCalled();
});
it('should require at least version 3 of Upload-Draft-Interop-Version header if provided', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Upload-Draft-Interop-Version', '2')
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining(['version must not be less than 3']),
}),
);
});
it('should require X-Immich-Asset-Data header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: 'x-immich-asset-data header is required' }));
});
it('should require Repr-Digest header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: 'Missing repr-digest header' }));
});
it('should allow conventional upload without Upload-Complete header', async () => {
const { status } = await request(ctx.getHttpServer())
.post('/upload')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(201);
});
it('should require Upload-Length header for incomplete upload', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?0')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: 'Missing upload-length header' }));
});
it('should infer upload length from content length if complete upload', async () => {
const { status } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.send(buffer);
expect(status).toBe(201);
});
it('should reject invalid Repr-Digest format', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', checksum)
.set('Repr-Digest', 'invalid-format')
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: 'Invalid repr-digest header' }));
});
it('should validate device-asset-id is required in asset data', async () => {
const assetData = serializeDictionary({
filename: 'test.jpg',
'device-id': 'test-device',
'file-created-at': new Date().toISOString(),
'file-modified-at': new Date().toISOString(),
});
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', assetData)
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([expect.stringContaining('deviceAssetId')]),
}),
);
});
it('should validate device-id is required in asset data', async () => {
const assetData = serializeDictionary({
filename: 'test.jpg',
'device-asset-id': 'test-asset',
'file-created-at': new Date().toISOString(),
'file-modified-at': new Date().toISOString(),
});
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', assetData)
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([expect.stringContaining('deviceId')]),
}),
);
});
it('should validate filename is required in asset data', async () => {
const assetData = serializeDictionary({
'device-asset-id': 'test-asset',
'device-id': 'test-device',
'file-created-at': new Date().toISOString(),
'file-modified-at': new Date().toISOString(),
});
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', assetData)
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([expect.stringContaining('filename')]),
}),
);
});
it('should accept Upload-Incomplete header for version 3', async () => {
const { body, status } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '3')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Incomplete', '?0')
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(body).toEqual({});
expect(status).not.toBe(400);
});
it('should validate Upload-Complete is a boolean structured field', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Complete', 'true')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: 'upload-complete must be a structured boolean value' }));
});
it('should validate Upload-Length is a positive integer', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '-100')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining(['uploadLength must not be less than 1']),
}),
);
});
});
describe('PATCH /upload/:id', () => {
const uploadId = factory.uuid();
it('should be an authenticated route', async () => {
await request(ctx.getHttpServer()).patch(`/upload/${uploadId}`);
expect(ctx.authenticate).toHaveBeenCalled();
});
it('should require Upload-Draft-Interop-Version header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.patch(`/upload/${uploadId}`)
.set('Upload-Offset', '0')
.set('Upload-Complete', '?1')
.send(Buffer.from('test'));
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining(['version must be an integer number', 'version must not be less than 3']),
}),
);
});
it('should require Upload-Offset header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.patch(`/upload/${uploadId}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Complete', '?1')
.send(Buffer.from('test'));
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([
'uploadOffset must be an integer number',
'uploadOffset must not be less than 0',
]),
}),
);
});
it('should require Upload-Complete header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.patch(`/upload/${uploadId}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', '0')
.set('Content-Type', 'application/partial-upload')
.send(Buffer.from('test'));
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: ['uploadComplete must be a boolean value'] }));
});
it('should validate UUID parameter', async () => {
const { status, body } = await request(ctx.getHttpServer())
.patch('/upload/invalid-uuid')
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', '0')
.set('Upload-Complete', '?0')
.send(Buffer.from('test'));
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: ['id must be a UUID'] }));
});
it('should validate Upload-Offset is a non-negative integer', async () => {
const { status, body } = await request(ctx.getHttpServer())
.patch(`/upload/${uploadId}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', '-50')
.set('Upload-Complete', '?0')
.send(Buffer.from('test'));
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining(['uploadOffset must not be less than 0']),
}),
);
});
it('should require Content-Type: application/partial-upload for version >= 6', async () => {
const { status, body } = await request(ctx.getHttpServer())
.patch(`/upload/${uploadId}`)
.set('Upload-Draft-Interop-Version', '6')
.set('Upload-Offset', '0')
.set('Upload-Complete', '?0')
.set('Content-Type', 'application/octet-stream')
.send(Buffer.from('test'));
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: ['contentType must be equal to application/partial-upload'],
}),
);
});
it('should allow other Content-Type for version < 6', async () => {
const { body } = await request(ctx.getHttpServer())
.patch(`/upload/${uploadId}`)
.set('Upload-Draft-Interop-Version', '3')
.set('Upload-Offset', '0')
.set('Upload-Incomplete', '?1')
.set('Content-Type', 'application/octet-stream')
.send();
// Will fail for other reasons, but not content-type validation
expect(body).not.toEqual(
expect.objectContaining({
message: expect.arrayContaining([expect.stringContaining('contentType')]),
}),
);
});
it('should accept Upload-Incomplete header for version 3', async () => {
const { status } = await request(ctx.getHttpServer())
.patch(`/upload/${uploadId}`)
.set('Upload-Draft-Interop-Version', '3')
.set('Upload-Offset', '0')
.set('Upload-Incomplete', '?1')
.send();
// Should not fail validation
expect(status).not.toBe(400);
});
});
describe('DELETE /upload/:id', () => {
const uploadId = factory.uuid();
it('should be an authenticated route', async () => {
await request(ctx.getHttpServer()).delete(`/upload/${uploadId}`);
expect(ctx.authenticate).toHaveBeenCalled();
});
it('should validate UUID parameter', async () => {
const { status, body } = await request(ctx.getHttpServer()).delete('/upload/invalid-uuid');
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: ['id must be a UUID'] }));
});
});
describe('HEAD /upload/:id', () => {
const uploadId = factory.uuid();
it('should be an authenticated route', async () => {
await request(ctx.getHttpServer()).head(`/upload/${uploadId}`);
expect(ctx.authenticate).toHaveBeenCalled();
});
it('should require Upload-Draft-Interop-Version header', async () => {
const { status } = await request(ctx.getHttpServer()).head(`/upload/${uploadId}`);
expect(status).toBe(400);
});
it('should validate UUID parameter', async () => {
const { status } = await request(ctx.getHttpServer())
.head('/upload/invalid-uuid')
.set('Upload-Draft-Interop-Version', '8');
expect(status).toBe(400);
});
});
});

View File

@@ -0,0 +1,108 @@
import { Controller, Delete, Head, HttpCode, HttpStatus, Options, Param, Patch, Post, Req, Res } from '@nestjs/common';
import { ApiHeader, ApiOkResponse, ApiTags } from '@nestjs/swagger';
import { Request, Response } from 'express';
import { GetUploadStatusDto, Header, ResumeUploadDto, StartUploadDto, UploadOkDto } from 'src/dtos/asset-upload.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ImmichHeader, Permission } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { validateSyncOrReject } from 'src/utils/request';
import { UUIDParamDto } from 'src/validation';
// Shared OpenAPI header descriptors, reused across the upload routes below.
const apiInteropVersion = {
name: Header.InteropVersion,
description: `Indicates the version of the RUFH protocol supported by the client.`,
required: true,
};
const apiUploadComplete = {
name: Header.UploadComplete,
description:
'Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.',
required: true,
};
const apiContentLength = {
name: Header.ContentLength,
description: 'Non-negative size of the request body in bytes.',
required: true,
};
// This is important to let go of the asset lock for an inactive request
const SOCKET_TIMEOUT_MS = 30_000;
// Endpoints for the draft "Resumable Uploads for HTTP" (RUFH) protocol plus a
// conventional single-request upload path. Header validation is done
// synchronously via validateSyncOrReject; the service streams the request body
// and writes the response itself (hence @Res and the socket timeout on each route).
@ApiTags('Upload')
@Controller('upload')
export class AssetUploadController {
constructor(private service: AssetUploadService) {}
// POST /upload — begin a new upload (complete, or the first chunk of a resumable one).
@Post()
@Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
@ApiHeader({
name: ImmichHeader.AssetData,
description: `RFC 9651 structured dictionary containing asset metadata with the following keys:
- device-asset-id (string, required): Unique device asset identifier
- device-id (string, required): Device identifier
- file-created-at (string/date, required): ISO 8601 date string or Unix timestamp
- file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp
- filename (string, required): Original filename
- is-favorite (boolean, optional): Favorite status
- live-photo-video-id (string, optional): Live photo ID for assets from iOS devices
- icloud-id (string, optional): iCloud identifier for assets from iOS devices`,
required: true,
example:
'device-asset-id="abc123", device-id="phone1", filename="photo.jpg", file-created-at="2024-01-01T00:00:00Z", file-modified-at="2024-01-01T00:00:00Z"',
})
@ApiHeader({
name: Header.ReprDigest,
description:
'RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.',
required: true,
})
@ApiHeader({ ...apiInteropVersion, required: false })
@ApiHeader({ ...apiUploadComplete, required: false })
@ApiHeader(apiContentLength)
@ApiOkResponse({ type: UploadOkDto })
startUpload(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response): Promise<void> {
// Drop idle sockets so the per-asset lock is released for inactive requests.
res.setTimeout(SOCKET_TIMEOUT_MS);
return this.service.startUpload(auth, req, res, validateSyncOrReject(StartUploadDto, req.headers));
}
// PATCH /upload/:id — append data to an existing upload at Upload-Offset.
@Patch(':id')
@Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
@ApiHeader({
name: Header.UploadOffset,
description:
'Non-negative byte offset indicating the starting position of the data in the request body within the entire file.',
required: true,
})
@ApiHeader(apiInteropVersion)
@ApiHeader(apiUploadComplete)
@ApiHeader(apiContentLength)
@ApiOkResponse({ type: UploadOkDto })
resumeUpload(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response, @Param() { id }: UUIDParamDto) {
res.setTimeout(SOCKET_TIMEOUT_MS);
return this.service.resumeUpload(auth, req, res, id, validateSyncOrReject(ResumeUploadDto, req.headers));
}
// DELETE /upload/:id — cancel an in-progress upload.
@Delete(':id')
@Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
cancelUpload(@Auth() auth: AuthDto, @Res() res: Response, @Param() { id }: UUIDParamDto) {
res.setTimeout(SOCKET_TIMEOUT_MS);
return this.service.cancelUpload(auth, id, res);
}
// HEAD /upload/:id — query the status of an upload (details written by the service).
@Head(':id')
@Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
@ApiHeader(apiInteropVersion)
getUploadStatus(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response, @Param() { id }: UUIDParamDto) {
res.setTimeout(SOCKET_TIMEOUT_MS);
return this.service.getUploadStatus(auth, res, id, validateSyncOrReject(GetUploadStatusDto, req.headers));
}
// OPTIONS /upload — advertise upload capabilities; note: not @Authenticated.
@Options()
@HttpCode(HttpStatus.NO_CONTENT)
getUploadOptions(@Res() res: Response) {
return this.service.getUploadOptions(res);
}
}

View File

@@ -3,6 +3,7 @@ import { AlbumController } from 'src/controllers/album.controller';
import { ApiKeyController } from 'src/controllers/api-key.controller';
import { AppController } from 'src/controllers/app.controller';
import { AssetMediaController } from 'src/controllers/asset-media.controller';
import { AssetUploadController } from 'src/controllers/asset-upload.controller';
import { AssetController } from 'src/controllers/asset.controller';
import { AuthAdminController } from 'src/controllers/auth-admin.controller';
import { AuthController } from 'src/controllers/auth.controller';
@@ -40,6 +41,7 @@ export const controllers = [
AppController,
AssetController,
AssetMediaController,
AssetUploadController,
AuthController,
AuthAdminController,
DownloadController,

View File

@@ -0,0 +1,196 @@
import { BadRequestException } from '@nestjs/common';
import { ApiProperty } from '@nestjs/swagger';
import { Expose, plainToInstance, Transform, Type } from 'class-transformer';
import { Equals, IsBoolean, IsInt, IsNotEmpty, IsString, Min, ValidateIf, ValidateNested } from 'class-validator';
import { ImmichHeader } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate } from 'src/validation';
import { parseDictionary } from 'structured-headers';
// HTTP header names used by the resumable-upload endpoints (lowercase, as
// they appear in Express's req.headers).
export enum Header {
ContentLength = 'content-length',
ContentType = 'content-type',
// Version of the draft resumable-upload protocol the client speaks.
InteropVersion = 'upload-draft-interop-version',
// Structured dictionary carrying the `sha` checksum of the whole file.
ReprDigest = 'repr-digest',
// Structured boolean; newer draft versions' completeness flag.
UploadComplete = 'upload-complete',
// Inverted completeness flag used by draft version 3 clients.
UploadIncomplete = 'upload-incomplete',
UploadLength = 'upload-length',
UploadOffset = 'upload-offset',
}
// Asset metadata parsed from the x-immich-asset-data structured dictionary
// header (see StartUploadDto's transform for the key mapping).
// NOTE(review): StartUploadDto also maps a 'duration' key that has no matching
// field here — confirm whether it should be added.
export class UploadAssetDataDto {
// Unique identifier of the asset on the originating device.
@IsNotEmpty()
@IsString()
deviceAssetId!: string;
// Identifier of the device that produced the asset.
@IsNotEmpty()
@IsString()
deviceId!: string;
fileCreatedAt!: Date;
@ValidateDate()
fileModifiedAt!: Date;
// Original filename as supplied by the client.
@IsString()
@IsNotEmpty()
filename!: string;
@ValidateBoolean({ optional: true })
isFavorite?: boolean;
// Live-photo pairing id for assets from iOS devices.
@Optional()
@IsString()
@IsNotEmpty()
livePhotoVideoId?: string;
// iCloud identifier for assets from iOS devices. Declared optional (`?`) to
// match the @Optional() decorator; the original declared it required (`!`),
// inconsistent with livePhotoVideoId above.
@Optional()
@IsString()
@IsNotEmpty()
iCloudId?: string;
}
// Headers common to both the initial POST and subsequent PATCH requests.
export class BaseUploadHeadersDto {
// Size of the request body in bytes; coerced from the header string and
// required to be a non-negative integer.
@Expose({ name: Header.ContentLength })
@Min(0)
@IsInt()
@Type(() => Number)
contentLength!: number;
}
// Validated headers for POST /upload. Instances are built straight from
// req.headers, hence the @Expose name mappings and string-coercion transforms.
export class StartUploadDto extends BaseUploadHeadersDto {
// Optional RUFH interop version; when supplied it must be an integer >= 3.
@Expose({ name: Header.InteropVersion })
@Optional()
@Min(3)
@IsInt()
@Type(() => Number)
version?: number;
// Asset metadata from the x-immich-asset-data header, parsed from an
// RFC 9651 structured dictionary into an UploadAssetDataDto for validation.
@Expose({ name: ImmichHeader.AssetData })
@ValidateNested()
@Transform(({ value }) => {
if (!value) {
throw new BadRequestException(`${ImmichHeader.AssetData} header is required`);
}
try {
const dict = parseDictionary(value);
return plainToInstance(UploadAssetDataDto, {
deviceAssetId: dict.get('device-asset-id')?.[0],
deviceId: dict.get('device-id')?.[0],
filename: dict.get('filename')?.[0],
duration: dict.get('duration')?.[0],
fileCreatedAt: dict.get('file-created-at')?.[0],
fileModifiedAt: dict.get('file-modified-at')?.[0],
isFavorite: dict.get('is-favorite')?.[0],
livePhotoVideoId: dict.get('live-photo-video-id')?.[0],
iCloudId: dict.get('icloud-id')?.[0],
});
} catch {
// Surface parse failures as a 400 rather than an unhandled 500.
throw new BadRequestException(`${ImmichHeader.AssetData} must be a valid structured dictionary`);
}
})
assetData!: UploadAssetDataDto;
// File checksum from repr-digest: must be an `sha` byte sequence of exactly
// 20 bytes (SHA-1, matching the digests used elsewhere in the codebase).
@Expose({ name: Header.ReprDigest })
@Transform(({ value }) => {
if (!value) {
throw new BadRequestException(`Missing ${Header.ReprDigest} header`);
}
const checksum = parseDictionary(value).get('sha')?.[0];
if (checksum instanceof ArrayBuffer && checksum.byteLength === 20) {
return Buffer.from(checksum);
}
throw new BadRequestException(`Invalid ${Header.ReprDigest} header`);
})
checksum!: Buffer;
// Total file size. Taken from upload-length when present; otherwise inferred
// from content-length as long as the upload is not explicitly incomplete.
@Expose()
@Min(1)
@IsInt()
@Transform(({ obj }) => {
const uploadLength = obj[Header.UploadLength];
if (uploadLength != undefined) {
return Number(uploadLength);
}
const contentLength = obj[Header.ContentLength];
if (contentLength && isUploadComplete(obj) !== false) {
return Number(contentLength);
}
throw new BadRequestException(`Missing ${Header.UploadLength} header`);
})
uploadLength!: number;
// true = body contains the whole file, false = more chunks will follow,
// undefined = conventional upload (neither completeness header supplied).
@Expose()
@Transform(({ obj }) => isUploadComplete(obj))
uploadComplete?: boolean;
}
// Validated headers for PATCH /upload/:id (appending to an existing upload).
export class ResumeUploadDto extends BaseUploadHeadersDto {
// RUFH interop version is mandatory on resume and must be >= 3.
@Expose({ name: Header.InteropVersion })
@Min(3)
@IsInt()
@Type(() => Number)
version!: number;
// Draft versions >= 6 require the dedicated partial-upload media type;
// older versions may send any content type (ValidateIf skips the check).
@Expose({ name: Header.ContentType })
@ValidateIf((o) => o.version && o.version >= 6)
@Equals('application/partial-upload')
contentType!: string;
@Expose({ name: Header.UploadLength })
@Min(1)
@IsInt()
@Type(() => Number)
@Optional()
uploadLength?: number;
// Byte position within the file at which this request body starts.
@Expose({ name: Header.UploadOffset })
@Min(0)
@IsInt()
@Type(() => Number)
uploadOffset!: number;
// Unlike StartUploadDto, completeness is required here: IsBoolean rejects
// the undefined produced when neither completeness header is present.
@Expose()
@IsBoolean()
@Transform(({ obj }) => isUploadComplete(obj))
uploadComplete!: boolean;
}
// Validated headers for HEAD /upload/:id status queries.
export class GetUploadStatusDto {
// RUFH interop version; required, integer >= 3.
@Expose({ name: Header.InteropVersion })
@Min(3)
@IsInt()
@Type(() => Number)
version!: number;
}
// Success response body: the id of the created/target asset.
export class UploadOkDto {
@ApiProperty()
id!: string;
}
// RFC 9651 structured-field boolean literals.
const STRUCTURED_TRUE = '?1';
const STRUCTURED_FALSE = '?0';
/**
 * Resolves the upload completeness flag from the request headers.
 *
 * Prefers Upload-Complete (newer draft versions); falls back to the inverted
 * Upload-Incomplete header used by draft version 3 clients.
 *
 * @returns true if the request completes the file, false if more chunks will
 *   follow, or undefined when neither header is present (conventional upload).
 * @throws BadRequestException when either header is present but not a valid
 *   structured boolean.
 */
function isUploadComplete(obj: any): boolean | undefined {
  const uploadComplete = obj[Header.UploadComplete];
  if (uploadComplete === STRUCTURED_TRUE) {
    return true;
  }
  if (uploadComplete === STRUCTURED_FALSE) {
    return false;
  }
  if (uploadComplete !== undefined) {
    throw new BadRequestException('upload-complete must be a structured boolean value');
  }
  const uploadIncomplete = obj[Header.UploadIncomplete];
  if (uploadIncomplete === STRUCTURED_TRUE) {
    return false;
  }
  if (uploadIncomplete === STRUCTURED_FALSE) {
    return true;
  }
  // BUG FIX: the original tested `uploadComplete !== undefined` here, which is
  // always false by this point, so malformed Upload-Incomplete values were
  // silently treated as absent instead of being rejected.
  if (uploadIncomplete !== undefined) {
    throw new BadRequestException('upload-incomplete must be a structured boolean value');
  }
  // Neither header present: conventional (non-resumable) upload.
  return undefined;
}

View File

@@ -55,11 +55,23 @@ export class DatabaseBackupConfig {
keepLastAmount!: number;
}
export class UploadBackupConfig {
@IsInt()
@IsPositive()
@IsNotEmpty()
maxAgeHours!: number;
}
export class SystemConfigBackupsDto {
@Type(() => DatabaseBackupConfig)
@ValidateNested()
@IsObject()
database!: DatabaseBackupConfig;
@Type(() => UploadBackupConfig)
@ValidateNested()
@IsObject()
upload!: UploadBackupConfig;
}
export class SystemConfigFFmpegDto {
@@ -344,6 +356,9 @@ class SystemConfigNightlyTasksDto {
@ValidateBoolean()
syncQuotaUsage!: boolean;
@ValidateBoolean()
removeStaleUploads!: boolean;
}
class SystemConfigOAuthDto {

View File

@@ -20,6 +20,7 @@ export enum ImmichHeader {
SharedLinkSlug = 'x-immich-share-slug',
Checksum = 'x-immich-checksum',
Cid = 'x-immich-cid',
AssetData = 'x-immich-asset-data',
}
export enum ImmichQuery {
@@ -303,6 +304,7 @@ export enum AssetStatus {
Active = 'active',
Trashed = 'trashed',
Deleted = 'deleted',
Partial = 'partial',
}
export enum SourceType {
@@ -493,6 +495,7 @@ export enum BootstrapEventPriority {
JobService = -190,
// Initialise config after other bootstrap services, stop other services from using config on bootstrap
SystemConfig = 100,
UploadService = 200,
}
export enum QueueName {
@@ -528,6 +531,8 @@ export enum JobName {
AssetFileMigration = 'AssetFileMigration',
AssetGenerateThumbnailsQueueAll = 'AssetGenerateThumbnailsQueueAll',
AssetGenerateThumbnails = 'AssetGenerateThumbnails',
PartialAssetCleanup = 'PartialAssetCleanup',
PartialAssetCleanupQueueAll = 'PartialAssetCleanupQueueAll',
AuditLogCleanup = 'AuditLogCleanup',
AuditTableCleanup = 'AuditTableCleanup',

View File

@@ -14,6 +14,7 @@ from
left join "smart_search" on "asset"."id" = "smart_search"."assetId"
where
"asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit
$2
@@ -40,6 +41,7 @@ from
"asset"
where
"asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit
$2
@@ -52,6 +54,7 @@ from
"asset"
where
"asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit
$2
@@ -78,7 +81,8 @@ from
"asset"
inner join "asset_job_status" on "asset_job_status"."assetId" = "asset"."id"
where
"asset"."deletedAt" is null
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "asset"."visibility" != $1
and (
"asset_job_status"."previewAt" is null
@@ -110,6 +114,7 @@ from
"asset"
where
"asset"."id" = $1
and "asset"."status" != 'partial'
-- AssetJobRepository.getForGenerateThumbnailJob
select
@@ -141,6 +146,7 @@ from
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."id" = $1
and "asset"."status" != 'partial'
-- AssetJobRepository.getForMetadataExtraction
select
@@ -178,6 +184,7 @@ from
"asset"
where
"asset"."id" = $1
and "asset"."status" != 'partial'
-- AssetJobRepository.getAlbumThumbnailFiles
select
@@ -198,7 +205,8 @@ from
inner join "smart_search" on "asset"."id" = "smart_search"."assetId"
inner join "asset_job_status" as "job_status" on "job_status"."assetId" = "asset"."id"
where
"asset"."deletedAt" is null
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "asset"."visibility" in ('archive', 'timeline')
and "job_status"."duplicatesDetectedAt" is null
@@ -210,6 +218,7 @@ from
inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id"
where
"asset"."visibility" != $1
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "job_status"."previewAt" is not null
and not exists (
@@ -244,6 +253,7 @@ from
"asset"
where
"asset"."id" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.getForDetectFacesJob
select
@@ -284,6 +294,7 @@ from
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."id" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.getForSyncAssets
select
@@ -368,6 +379,7 @@ from
) as "stacked_assets" on "stack"."id" is not null
where
"asset"."id" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForVideoConversion
select
@@ -381,6 +393,7 @@ where
or "asset"."encodedVideoPath" = $2
)
and "asset"."visibility" != $3
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
-- AssetJobRepository.getForVideoConversion
@@ -394,6 +407,7 @@ from
where
"asset"."id" = $1
and "asset"."type" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForMetadataExtraction
select
@@ -406,6 +420,7 @@ where
"asset_job_status"."metadataExtractedAt" is null
or "asset_job_status"."assetId" is null
)
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
-- AssetJobRepository.getForStorageTemplateJob
@@ -426,7 +441,8 @@ from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."deletedAt" is null
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "asset"."id" = $1
-- AssetJobRepository.streamForStorageTemplateJob
@@ -447,7 +463,8 @@ from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."deletedAt" is null
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
-- AssetJobRepository.streamForDeletedJob
select
@@ -457,6 +474,7 @@ from
"asset"
where
"asset"."deletedAt" <= $1
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForSidecar
select
@@ -469,6 +487,7 @@ where
or "asset"."sidecarPath" is null
)
and "asset"."visibility" != $2
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForDetectFacesJob
select
@@ -478,8 +497,10 @@ from
inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id"
where
"asset"."visibility" != $1
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "job_status"."previewAt" is not null
and "asset"."status" != 'partial'
order by
"asset"."fileCreatedAt" desc
@@ -489,4 +510,14 @@ select
from
"asset"
where
"asset"."deletedAt" is null
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
-- AssetJobRepository.streamForPartialAssetCleanupJob
select
"id"
from
"asset"
where
"asset"."status" = 'partial'
and "asset"."createdAt" < $1

View File

@@ -46,6 +46,68 @@ where
"assetId" = $1
and "key" = $2
-- AssetRepository.getCompletionMetadata
select
"originalPath" as "path",
"status",
"fileModifiedAt",
"createdAt",
"checksum",
"fileSizeInByte" as "size"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"id" = $1
and "ownerId" = $2
-- AssetRepository.setComplete
update "asset" as "complete_asset"
set
"status" = 'active',
"visibility" = case
when (
"complete_asset"."type" = 'VIDEO'
and exists (
select
from
"asset"
where
"complete_asset"."id" = "asset"."livePhotoVideoId"
)
) then 'hidden'::asset_visibility_enum
else 'timeline'::asset_visibility_enum
end
where
"id" = $1
and "status" = 'partial'
-- AssetRepository.removeAndDecrementQuota
with
"asset_exif" as (
select
"fileSizeInByte"
from
"asset_exif"
where
"assetId" = $1
),
"asset" as (
delete from "asset"
where
"id" = $2
returning
"ownerId"
)
update "user"
set
"quotaUsageInBytes" = "quotaUsageInBytes" - "fileSizeInByte"
from
"asset_exif",
"asset"
where
"user"."id" = "asset"."ownerId"
-- AssetRepository.getByDayOfYear
with
"res" as (
@@ -258,7 +320,9 @@ where
-- AssetRepository.getUploadAssetIdByChecksum
select
"id"
"id",
"status",
"createdAt"
from
"asset"
where

View File

@@ -1,10 +1,10 @@
import { Injectable } from '@nestjs/common';
import { Kysely } from 'kysely';
import { Kysely, sql } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { Asset, columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, AssetType, AssetVisibility } from 'src/enum';
import { AssetFileType, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { DB } from 'src/schema';
import { StorageAsset } from 'src/types';
import {
@@ -28,6 +28,7 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.leftJoin('smart_search', 'asset.id', 'smart_search.assetId')
.select(['id', 'type', 'ownerId', 'duplicateId', 'stackId', 'visibility', 'smart_search.embedding'])
.limit(1)
@@ -39,6 +40,7 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.select(['id', 'sidecarPath', 'originalPath'])
.select((eb) =>
jsonArrayFrom(
@@ -58,6 +60,7 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.select(['id', 'sidecarPath', 'originalPath'])
.limit(1)
.executeTakeFirst();
@@ -69,6 +72,7 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['asset.id', 'asset.thumbhash'])
.select(withFiles)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.$if(!force, (qb) =>
@@ -93,6 +97,7 @@ export class AssetJobRepository {
.select(['asset.id', 'asset.ownerId', 'asset.encodedVideoPath'])
.select(withFiles)
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -112,6 +117,7 @@ export class AssetJobRepository {
.select(withFiles)
.$call(withExifInner)
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -122,6 +128,7 @@ export class AssetJobRepository {
.select(columns.asset)
.select(withFaces)
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -139,6 +146,7 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.innerJoin('asset_job_status as job_status', 'assetId', 'asset.id')
.where('job_status.previewAt', 'is not', null);
@@ -149,6 +157,7 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.select(['asset.id'])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.innerJoin('smart_search', 'asset.id', 'smart_search.assetId')
.$call(withDefaultVisibility)
@@ -177,6 +186,7 @@ export class AssetJobRepository {
.select(['asset.id', 'asset.visibility'])
.select((eb) => withFiles(eb, AssetFileType.Preview))
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -189,6 +199,7 @@ export class AssetJobRepository {
.select((eb) => withFaces(eb, true))
.select((eb) => withFiles(eb, AssetFileType.Preview))
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -241,6 +252,7 @@ export class AssetJobRepository {
)
.select((eb) => toJson(eb, 'stacked_assets').as('stack'))
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -255,6 +267,7 @@ export class AssetJobRepository {
.where((eb) => eb.or([eb('asset.encodedVideoPath', 'is', null), eb('asset.encodedVideoPath', '=', '')]))
.where('asset.visibility', '!=', AssetVisibility.Hidden),
)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.stream();
}
@@ -266,6 +279,7 @@ export class AssetJobRepository {
.select(['asset.id', 'asset.ownerId', 'asset.originalPath', 'asset.encodedVideoPath'])
.where('asset.id', '=', id)
.where('asset.type', '=', AssetType.Video)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -281,6 +295,7 @@ export class AssetJobRepository {
eb.or([eb('asset_job_status.metadataExtractedAt', 'is', null), eb('asset_job_status.assetId', 'is', null)]),
),
)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.stream();
}
@@ -303,6 +318,7 @@ export class AssetJobRepository {
'asset_exif.timeZone',
'asset_exif.fileSizeInByte',
])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null);
}
@@ -324,6 +340,7 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['id', 'isOffline'])
.where('asset.deletedAt', '<=', trashedBefore)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.stream();
}
@@ -336,6 +353,7 @@ export class AssetJobRepository {
qb.where((eb) => eb.or([eb('asset.sidecarPath', '=', ''), eb('asset.sidecarPath', 'is', null)])),
)
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.stream();
}
@@ -344,12 +362,38 @@ export class AssetJobRepository {
return this.assetsWithPreviews()
.$if(force === false, (qb) => qb.where('job_status.facesRecognizedAt', 'is', null))
.select(['asset.id'])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.orderBy('asset.fileCreatedAt', 'desc')
.stream();
}
@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForMigrationJob() {
return this.db.selectFrom('asset').select(['id']).where('asset.deletedAt', 'is', null).stream();
return this.db
.selectFrom('asset')
.select(['id'])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.stream();
}
// Loads the on-disk expectations for a partial (never-completed) upload so the
// cleanup job can decide whether to finalize or remove it: the original path,
// the expected size/checksum, and the expected mtime.
// Returns undefined when the asset does not exist, is no longer 'partial', or
// has no asset_exif row (inner join).
getForPartialAssetCleanupJob(assetId: string) {
  return this.db
    .selectFrom('asset')
    .innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
    .select(['originalPath as path', 'fileSizeInByte as size', 'checksum', 'fileModifiedAt'])
    .where('id', '=', assetId)
    // only partial assets are eligible; completed assets must not be touched
    .where('status', '=', sql.lit(AssetStatus.Partial))
    .executeTakeFirst();
}
// Streams the ids of stale partial uploads (created before the cutoff) so the
// scheduler can queue one cleanup job per asset without loading them all into
// memory at once.
@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForPartialAssetCleanupJob(createdBefore: Date) {
  return this.db
    .selectFrom('asset')
    .select(['id'])
    .where('asset.status', '=', sql.lit(AssetStatus.Partial))
    .where('asset.createdAt', '<', createdBefore)
    .stream();
}
}

View File

@@ -253,6 +253,96 @@ export class AssetRepository {
return this.db.insertInto('asset').values(asset).returningAll().executeTakeFirstOrThrow();
}
// Creates an asset together with its exif (file size) row and optional
// metadata rows, and increments the owner's quota usage — all composed into a
// single SQL statement via CTEs so the writes happen atomically.
// NOTE(review): the `(query as any) =` assignments work around Kysely's
// builder typing when `.with(...)` clauses are added conditionally; the final
// chain is still one statement. Worth revisiting if the Kysely types allow it.
createWithMetadata(asset: Insertable<AssetTable> & { id: string }, size: number, metadata?: AssetMetadataItem[]) {
  let query = this.db;
  if (asset.livePhotoVideoId) {
    // Live photo: hide the referenced motion video, but only when it exists,
    // is a video, and belongs to the same owner. Its id is re-selected below
    // so an invalid reference resolves to null rather than a dangling link.
    (query as any) = query.with('motion_asset', (qb) =>
      qb
        .updateTable('asset')
        .set({ visibility: AssetVisibility.Hidden })
        .where('id', '=', asset.livePhotoVideoId!)
        .where('type', '=', sql.lit(AssetType.Video))
        .where('ownerId', '=', asset.ownerId)
        .returning('id'),
    );
  }
  (query as any) = query
    .with('asset', (qb) =>
      qb
        .insertInto('asset')
        .values(
          // livePhotoVideoId is replaced by the validated id from the CTE above
          asset.livePhotoVideoId ? { ...asset, livePhotoVideoId: sql<string>`(select id from motion_asset)` } : asset,
        )
        .returning(['id', 'ownerId']),
    )
    .with('exif', (qb) =>
      qb
        .insertInto('asset_exif')
        .columns(['assetId', 'fileSizeInByte'])
        // the asset id comes from the insert CTE; the size is the literal value
        .expression((eb) => eb.selectFrom('asset').select(['asset.id', eb.val(size).as('fileSizeInByte')])),
    );
  if (metadata && metadata.length > 0) {
    (query as any) = query.with('metadata', (qb) =>
      qb.insertInto('asset_metadata').values(metadata.map(({ key, value }) => ({ assetId: asset.id, key, value }))),
    );
  }
  // Outermost statement: bump the owner's quota by the uploaded file size.
  return query
    .updateTable('user')
    .from('asset')
    .set({ quotaUsageInBytes: sql`"quotaUsageInBytes" + ${size}` })
    .whereRef('user.id', '=', 'asset.ownerId')
    .execute();
}
// Fetches the fields needed to validate/finalize an upload of `assetId`:
// original path, status, expected mtime, creation time, checksum, and size.
// Scoped to `ownerId` so a caller can only complete their own asset; the inner
// join means assets without an asset_exif row return undefined.
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
getCompletionMetadata(assetId: string, ownerId: string) {
  return this.db
    .selectFrom('asset')
    .innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
    .select(['originalPath as path', 'status', 'fileModifiedAt', 'createdAt', 'checksum', 'fileSizeInByte as size'])
    .where('id', '=', assetId)
    .where('ownerId', '=', ownerId)
    .executeTakeFirst();
}
// Promotes a partial upload to 'active'. Visibility is decided in the same
// statement: a video that some other asset references as its live-photo motion
// part becomes 'hidden', everything else goes to 'timeline'.
// The `status = 'partial'` guard makes this a no-op if the asset was already
// completed (or removed), so concurrent/repeated completion is safe.
@GenerateSql({ params: [DummyValue.UUID] })
async setComplete(assetId: string) {
  await this.db
    .updateTable('asset as complete_asset')
    .set((eb) => ({
      status: sql.lit(AssetStatus.Active),
      visibility: eb
        .case()
        .when(
          eb.and([
            eb('complete_asset.type', '=', sql.lit(AssetType.Video)),
            // true when any asset points at this one as its motion video
            eb.exists(eb.selectFrom('asset').whereRef('complete_asset.id', '=', 'asset.livePhotoVideoId')),
          ]),
        )
        .then(sql<AssetVisibility>`'hidden'::asset_visibility_enum`)
        .else(sql<AssetVisibility>`'timeline'::asset_visibility_enum`)
        .end(),
    }))
    .where('id', '=', assetId)
    .where('status', '=', sql.lit(AssetStatus.Partial))
    .execute();
}
// Deletes an asset and gives its file size back to the owner's quota, in one
// statement: the first CTE captures the exif file size before the second CTE
// deletes the row, and the outer update subtracts it from the owner.
// NOTE(review): if no asset_exif row exists, the outer update matches nothing
// (quota unchanged) while the delete CTE still runs — TODO confirm that is the
// intended behavior for exif-less assets.
@GenerateSql({ params: [DummyValue.UUID] })
async removeAndDecrementQuota(id: string): Promise<void> {
  await this.db
    .with('asset_exif', (qb) => qb.selectFrom('asset_exif').where('assetId', '=', id).select('fileSizeInByte'))
    .with('asset', (qb) => qb.deleteFrom('asset').where('id', '=', id).returning('ownerId'))
    .updateTable('user')
    .from(['asset_exif', 'asset'])
    .set({ quotaUsageInBytes: sql`"quotaUsageInBytes" - "fileSizeInByte"` })
    .whereRef('user.id', '=', 'asset.ownerId')
    .execute();
}
createAll(assets: Insertable<AssetTable>[]) {
return this.db.insertInto('asset').values(assets).returningAll().execute();
}
@@ -492,17 +582,15 @@ export class AssetRepository {
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.BUFFER] })
async getUploadAssetIdByChecksum(ownerId: string, checksum: Buffer): Promise<string | undefined> {
const asset = await this.db
getUploadAssetIdByChecksum(ownerId: string, checksum: Buffer) {
return this.db
.selectFrom('asset')
.select('id')
.select(['id', 'status', 'createdAt'])
.where('ownerId', '=', asUuid(ownerId))
.where('checksum', '=', checksum)
.where('libraryId', 'is', null)
.limit(1)
.executeTakeFirst();
return asset?.id;
}
findLivePhotoMatch(options: LivePhotoSearchOptions) {

View File

@@ -451,6 +451,20 @@ export class DatabaseRepository {
return res as R;
}
/**
 * Runs `callback` while holding a per-UUID PostgreSQL advisory lock, pinned to
 * a single connection (session-level advisory locks must be released on the
 * same session that took them).
 *
 * Fix: acquisition now happens *before* the try block. Previously a failed
 * `acquireUuidLock` still hit the `finally` and called `releaseUuidLock` for a
 * lock this session never obtained — pg_advisory_unlock on an unheld lock
 * raises a WARNING and the release error could mask the original failure.
 *
 * @param uuid lock key; hashed to a 64-bit advisory-lock id
 * @param callback work to perform while the lock is held
 * @returns the callback's resolved value
 */
async withUuidLock<R>(uuid: string, callback: () => Promise<R>): Promise<R> {
  let res: R | undefined;
  await this.db.connection().execute(async (connection) => {
    await this.acquireUuidLock(uuid, connection);
    try {
      res = await callback();
    } finally {
      // always release on the same connection, even if the callback throws
      await this.releaseUuidLock(uuid, connection);
    }
  });
  // safe: the block above either assigned res or threw
  return res as R;
}
tryLock(lock: DatabaseLock): Promise<boolean> {
return this.db.connection().execute(async (connection) => this.acquireTryLock(lock, connection));
}
@@ -467,6 +481,10 @@ export class DatabaseRepository {
await sql`SELECT pg_advisory_lock(${lock})`.execute(connection);
}
// Blocks until a session-level advisory lock is acquired, keying the lock by
// hashing the UUID to a 64-bit integer with uuid_hash_extended(uuid, 0).
private async acquireUuidLock(uuid: string, connection: Kysely<DB>): Promise<void> {
  await sql`SELECT pg_advisory_lock(uuid_hash_extended(${uuid}, 0))`.execute(connection);
}
private async acquireTryLock(lock: DatabaseLock, connection: Kysely<DB>): Promise<boolean> {
const { rows } = await sql<{
pg_try_advisory_lock: boolean;
@@ -477,4 +495,8 @@ export class DatabaseRepository {
private async releaseLock(lock: DatabaseLock, connection: Kysely<DB>): Promise<void> {
await sql`SELECT pg_advisory_unlock(${lock})`.execute(connection);
}
// Releases the advisory lock taken by acquireUuidLock; must run on the same
// connection, since session-level advisory locks are tied to the session.
private async releaseUuidLock(uuid: string, connection: Kysely<DB>): Promise<void> {
  await sql`SELECT pg_advisory_unlock(uuid_hash_extended(${uuid}, 0))`.execute(connection);
}
}

View File

@@ -80,6 +80,9 @@ type EventMap = {
// stack bulk events
StackDeleteAll: [{ stackIds: string[]; userId: string }];
// upload events
UploadAbort: [{ assetId: string; abortTime: Date }];
// user events
UserSignup: [{ notify: boolean; id: string; password?: string }];
@@ -87,7 +90,7 @@ type EventMap = {
WebsocketConnect: [{ userId: string }];
};
export const serverEvents = ['ConfigUpdate'] as const;
export const serverEvents = ['ConfigUpdate', 'UploadAbort'] as const;
export type ServerEvents = (typeof serverEvents)[number];
export type EmitEvent = keyof EventMap;

View File

@@ -62,7 +62,11 @@ export class StorageRepository {
}
createWriteStream(filepath: string): Writable {
return createWriteStream(filepath, { flags: 'w' });
return createWriteStream(filepath, { flags: 'w', highWaterMark: 1024 * 1024 });
}
createOrAppendWriteStream(filepath: string): Writable {
return createWriteStream(filepath, { flags: 'a', highWaterMark: 1024 * 1024 });
}
createOrOverwriteFile(filepath: string, buffer: Buffer) {
@@ -156,10 +160,13 @@ export class StorageRepository {
}
}
// Recursively creates the directory (no error if it already exists); resolves
// with the first directory path created, or undefined when nothing was created.
mkdir(filepath: string): Promise<string | undefined> {
  return fs.mkdir(filepath, { recursive: true });
}
mkdirSync(filepath: string): void {
if (!existsSync(filepath)) {
mkdirSync(filepath, { recursive: true });
}
// does not throw an error if the folder already exists
mkdirSync(filepath, { recursive: true });
}
existsSync(filepath: string) {

View File

@@ -0,0 +1,9 @@
import { Kysely, sql } from 'kysely';
// Adds the 'partial' value to the asset status enum, used for uploads that
// have been started but not yet completed. IF NOT EXISTS keeps the migration
// idempotent if re-run.
export async function up(db: Kysely<any>): Promise<void> {
  await sql`ALTER TYPE "assets_status_enum" ADD VALUE IF NOT EXISTS 'partial'`.execute(db);
}
// Intentional no-op: PostgreSQL provides no way to drop a value from an
// existing enum type, so this migration cannot be reverted.
export async function down(): Promise<void> {
  // Cannot remove enum values in PostgreSQL
}

View File

@@ -215,7 +215,11 @@ describe(AssetMediaService.name, () => {
});
it('should find an existing asset', async () => {
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue('asset-id');
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: 'asset-id',
createdAt: new Date(),
status: AssetStatus.Active,
});
await expect(sut.getUploadAssetIdByChecksum(authStub.admin, file1.toString('hex'))).resolves.toEqual({
id: 'asset-id',
status: AssetMediaStatus.DUPLICATE,
@@ -224,7 +228,11 @@ describe(AssetMediaService.name, () => {
});
it('should find an existing asset by base64', async () => {
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue('asset-id');
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: 'asset-id',
createdAt: new Date(),
status: AssetStatus.Active,
});
await expect(sut.getUploadAssetIdByChecksum(authStub.admin, file1.toString('base64'))).resolves.toEqual({
id: 'asset-id',
status: AssetMediaStatus.DUPLICATE,
@@ -378,7 +386,11 @@ describe(AssetMediaService.name, () => {
(error as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;
mocks.asset.create.mockRejectedValue(error);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(assetEntity.id);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: assetEntity.id,
createdAt: new Date(),
status: AssetStatus.Active,
});
await expect(sut.uploadAsset(authStub.user1, createDto, file)).resolves.toEqual({
id: 'id_1',
@@ -803,7 +815,11 @@ describe(AssetMediaService.name, () => {
mocks.asset.update.mockRejectedValue(error);
mocks.asset.getById.mockResolvedValueOnce(sidecarAsset);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(sidecarAsset.id);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: sidecarAsset.id,
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([sidecarAsset.id]));
// this is the original file size
mocks.storage.stat.mockResolvedValue({ size: 0 } as Stats);

View File

@@ -43,12 +43,12 @@ export class AssetMediaService extends BaseService {
return;
}
const assetId = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, fromChecksum(checksum));
if (!assetId) {
const asset = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, fromChecksum(checksum));
if (!asset) {
return;
}
return { id: assetId, status: AssetMediaStatus.DUPLICATE };
return { id: asset.id, status: AssetMediaStatus.DUPLICATE };
}
canUploadFile({ auth, fieldName, file }: UploadRequest): true {
@@ -165,6 +165,10 @@ export class AssetMediaService extends BaseService {
throw new Error('Asset not found');
}
if (asset.status === AssetStatus.Partial) {
throw new BadRequestException('Cannot replace a partial asset');
}
this.requireQuota(auth, file.size);
await this.replaceFileData(asset.id, dto, file, sidecarFile?.originalPath);
@@ -313,12 +317,12 @@ export class AssetMediaService extends BaseService {
// handle duplicates with a success response
if (isAssetChecksumConstraint(error)) {
const duplicateId = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, file.checksum);
if (!duplicateId) {
const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, file.checksum);
if (!duplicate) {
this.logger.error(`Error locating duplicate for checksum constraint`);
throw new InternalServerErrorException();
}
return { status: AssetMediaStatus.DUPLICATE, id: duplicateId };
return { status: AssetMediaStatus.DUPLICATE, id: duplicate.id };
}
this.logger.error(`Error uploading file ${error}`, error?.stack);

View File

@@ -0,0 +1,456 @@
import { BadRequestException, InternalServerErrorException } from '@nestjs/common';
import { AssetMetadataKey, AssetStatus, AssetType, AssetVisibility, JobName, JobStatus } from 'src/enum';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
import { authStub } from 'test/fixtures/auth.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
describe(AssetUploadService.name, () => {
let sut: AssetUploadService;
let mocks: ServiceMocks;
beforeEach(() => {
({ sut, mocks } = newTestService(AssetUploadService));
});
// onStart: creating the partial asset row when an upload begins — type
// inference from the filename, quota enforcement, and duplicate handling via
// the checksum unique constraint.
describe('onStart', () => {
  // Baseline DTO for a complete, single-request 1 KiB image upload.
  const mockDto = {
    assetData: {
      filename: 'test.jpg',
      deviceAssetId: 'device-asset-1',
      deviceId: 'device-1',
      fileCreatedAt: new Date('2025-01-01T00:00:00Z'),
      fileModifiedAt: new Date('2025-01-01T12:00:00Z'),
      isFavorite: false,
      iCloudId: '',
    },
    checksum: Buffer.from('checksum'),
    uploadLength: 1024,
    uploadComplete: true,
    contentLength: 1024,
    isComplete: true,
    version: 8,
  };
  it('should create a new asset and return upload metadata', async () => {
    const assetId = factory.uuid();
    mocks.crypto.randomUUID.mockReturnValue(assetId);
    const result = await sut.onStart(authStub.user1, mockDto);
    expect(result).toEqual({
      id: assetId,
      path: expect.stringContaining(assetId),
      status: AssetStatus.Partial,
      isDuplicate: false,
    });
    // New uploads start hidden and partial until completion flips them.
    expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
      expect.objectContaining({
        id: assetId,
        ownerId: authStub.user1.user.id,
        checksum: mockDto.checksum,
        deviceAssetId: mockDto.assetData.deviceAssetId,
        deviceId: mockDto.assetData.deviceId,
        fileCreatedAt: mockDto.assetData.fileCreatedAt,
        fileModifiedAt: mockDto.assetData.fileModifiedAt,
        type: AssetType.Image,
        isFavorite: false,
        status: AssetStatus.Partial,
        visibility: AssetVisibility.Hidden,
        originalFileName: 'test.jpg',
      }),
      1024,
      undefined,
    );
  });
  it('should determine asset type from filename extension', async () => {
    const videoDto = { ...mockDto, assetData: { ...mockDto.assetData, filename: 'video.mp4' } };
    mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
    await sut.onStart(authStub.user1, videoDto);
    expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
      expect.objectContaining({
        type: AssetType.Video,
      }),
      expect.anything(),
      undefined,
    );
  });
  it('should throw BadRequestException for unsupported file types', async () => {
    const unsupportedDto = { ...mockDto, assetData: { ...mockDto.assetData, filename: 'document.xyz' } };
    await expect(sut.onStart(authStub.user1, unsupportedDto)).rejects.toThrow(BadRequestException);
    await expect(sut.onStart(authStub.user1, unsupportedDto)).rejects.toThrow('unsupported file type');
  });
  it('should validate quota before creating asset', async () => {
    // 1500 used + 1024 upload > 2000 limit → rejected before any insert.
    const authWithQuota = {
      ...authStub.user1,
      user: {
        ...authStub.user1.user,
        quotaSizeInBytes: 2000,
        quotaUsageInBytes: 1500,
      },
    };
    await expect(sut.onStart(authWithQuota, mockDto)).rejects.toThrow(BadRequestException);
    await expect(sut.onStart(authWithQuota, mockDto)).rejects.toThrow('Quota has been exceeded');
  });
  it('should allow upload when quota is null (unlimited)', async () => {
    const authWithUnlimitedQuota = {
      ...authStub.user1,
      user: {
        ...authStub.user1.user,
        quotaSizeInBytes: null,
        quotaUsageInBytes: 1000,
      },
    };
    mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
    await expect(sut.onStart(authWithUnlimitedQuota, mockDto)).resolves.toBeDefined();
  });
  it('should allow upload when within quota', async () => {
    const authWithQuota = {
      ...authStub.user1,
      user: {
        ...authStub.user1.user,
        quotaSizeInBytes: 5000,
        quotaUsageInBytes: 1000,
      },
    };
    mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
    const result = await sut.onStart(authWithQuota, mockDto);
    expect(result.isDuplicate).toBe(false);
  });
  it('should handle duplicate detection via checksum constraint', async () => {
    // Insert fails on the checksum unique constraint → the existing asset is
    // looked up and returned with isDuplicate: true instead of erroring.
    const existingAssetId = factory.uuid();
    const checksumError = new Error('duplicate key value violates unique constraint');
    (checksumError as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;
    mocks.asset.createWithMetadata.mockRejectedValue(checksumError);
    mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
      id: existingAssetId,
      status: AssetStatus.Partial,
      createdAt: new Date(),
    });
    const result = await sut.onStart(authStub.user1, mockDto);
    expect(result).toEqual({
      id: existingAssetId,
      path: expect.any(String),
      status: AssetStatus.Partial,
      isDuplicate: true,
    });
    expect(mocks.asset.getUploadAssetIdByChecksum).toHaveBeenCalledWith(authStub.user1.user.id, mockDto.checksum);
  });
  it('should throw InternalServerErrorException if duplicate lookup fails', async () => {
    const checksumError = new Error('duplicate key value violates unique constraint');
    (checksumError as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;
    mocks.asset.createWithMetadata.mockRejectedValue(checksumError);
    // eslint-disable-next-line unicorn/no-useless-undefined
    mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(undefined);
    await expect(sut.onStart(authStub.user1, mockDto)).rejects.toThrow(InternalServerErrorException);
  });
  it('should throw InternalServerErrorException for non-checksum errors', async () => {
    const genericError = new Error('database connection failed');
    mocks.asset.createWithMetadata.mockRejectedValue(genericError);
    await expect(sut.onStart(authStub.user1, mockDto)).rejects.toThrow(InternalServerErrorException);
  });
  it('should include iCloud metadata when provided', async () => {
    const dtoWithICloud = {
      ...mockDto,
      assetData: {
        ...mockDto.assetData,
        iCloudId: 'icloud-123',
      },
    };
    mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
    await sut.onStart(authStub.user1, dtoWithICloud);
    expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(expect.anything(), expect.anything(), [
      { key: AssetMetadataKey.MobileApp, value: { iCloudId: 'icloud-123' } },
    ]);
  });
  it('should set isFavorite when true', async () => {
    const favoriteDto = {
      ...mockDto,
      assetData: {
        ...mockDto.assetData,
        isFavorite: true,
      },
    };
    mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
    await sut.onStart(authStub.user1, favoriteDto);
    expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
      expect.objectContaining({
        isFavorite: true,
      }),
      expect.anything(),
      undefined,
    );
  });
});
// onComplete: finalizing a finished upload — status flip, metadata-extraction
// job, mtime restoration, and retry behavior on transient failures.
describe('onComplete', () => {
  const assetId = factory.uuid();
  const path = `/upload/${assetId}/file.jpg`;
  const fileModifiedAt = new Date('2025-01-01T12:00:00Z');
  it('should mark asset as complete and queue metadata extraction job', async () => {
    await sut.onComplete({ id: assetId, path, fileModifiedAt });
    expect(mocks.asset.setComplete).toHaveBeenCalledWith(assetId);
    expect(mocks.job.queue).toHaveBeenCalledWith({
      name: JobName.AssetExtractMetadata,
      data: { id: assetId, source: 'upload' },
    });
  });
  it('should update file modification time', async () => {
    await sut.onComplete({ id: assetId, path, fileModifiedAt });
    expect(mocks.storage.utimes).toHaveBeenCalledWith(path, expect.any(Date), fileModifiedAt);
  });
  it('should handle utimes failure gracefully', async () => {
    // utimes is best-effort: a failure must not block completion.
    mocks.storage.utimes.mockRejectedValue(new Error('Permission denied'));
    await expect(sut.onComplete({ id: assetId, path, fileModifiedAt })).resolves.toBeUndefined();
    // Should still complete asset and queue job
    expect(mocks.asset.setComplete).toHaveBeenCalled();
    expect(mocks.job.queue).toHaveBeenCalled();
  });
  it('should retry setComplete on transient failures', async () => {
    mocks.asset.setComplete
      .mockRejectedValueOnce(new Error('Transient error'))
      .mockRejectedValueOnce(new Error('Transient error'))
      .mockResolvedValue();
    await sut.onComplete({ id: assetId, path, fileModifiedAt });
    // two failures + the final success
    expect(mocks.asset.setComplete).toHaveBeenCalledTimes(3);
  });
  it('should retry job queueing on transient failures', async () => {
    mocks.job.queue.mockRejectedValueOnce(new Error('Transient error')).mockResolvedValue();
    await sut.onComplete({ id: assetId, path, fileModifiedAt });
    expect(mocks.job.queue).toHaveBeenCalledTimes(2);
  });
});
// onCancel: abandoning an upload — file unlink plus asset removal with quota
// refund, each retried on transient failures.
describe('onCancel', () => {
  const assetId = factory.uuid();
  const path = `/upload/${assetId}/file.jpg`;
  it('should delete file and remove asset record', async () => {
    await sut.onCancel(assetId, path);
    expect(mocks.storage.unlink).toHaveBeenCalledWith(path);
    expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
  });
  it('should retry unlink on transient failures', async () => {
    mocks.storage.unlink.mockRejectedValueOnce(new Error('Transient error')).mockResolvedValue();
    await sut.onCancel(assetId, path);
    expect(mocks.storage.unlink).toHaveBeenCalledTimes(2);
  });
  it('should retry removeAndDecrementQuota on transient failures', async () => {
    mocks.asset.removeAndDecrementQuota.mockRejectedValueOnce(new Error('Transient error')).mockResolvedValue();
    await sut.onCancel(assetId, path);
    expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledTimes(2);
  });
});
// removeStaleUploads: the scheduler that streams stale partial assets and
// queues one PartialAssetCleanup job per asset, in batches.
describe('removeStaleUploads', () => {
  it('should queue cleanup jobs for stale partial assets', async () => {
    const staleAssets = [{ id: factory.uuid() }, { id: factory.uuid() }, { id: factory.uuid() }];
    mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue(
      // eslint-disable-next-line @typescript-eslint/require-await
      (async function* () {
        for (const asset of staleAssets) {
          yield asset;
        }
      })(),
    );
    await sut.removeStaleUploads();
    expect(mocks.assetJob.streamForPartialAssetCleanupJob).toHaveBeenCalledWith(expect.any(Date));
    expect(mocks.job.queueAll).toHaveBeenCalledWith([
      { name: JobName.PartialAssetCleanup, data: staleAssets[0] },
      { name: JobName.PartialAssetCleanup, data: staleAssets[1] },
      { name: JobName.PartialAssetCleanup, data: staleAssets[2] },
    ]);
  });
  it('should batch cleanup jobs', async () => {
    // 1500 assets exceed one batch, so queueAll must flush twice.
    const assets = Array.from({ length: 1500 }, () => ({ id: factory.uuid() }));
    mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue(
      // eslint-disable-next-line @typescript-eslint/require-await
      (async function* () {
        for (const asset of assets) {
          yield asset;
        }
      })(),
    );
    await sut.removeStaleUploads();
    // Should be called twice: once for 1000, once for 500
    expect(mocks.job.queueAll).toHaveBeenCalledTimes(2);
  });
  it('should handle empty stream', async () => {
    mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue((async function* () {})());
    await sut.removeStaleUploads();
    expect(mocks.job.queueAll).toHaveBeenCalledWith([]);
  });
});
// removeStaleUpload: the per-asset cleanup job — completes the asset when the
// file on disk matches the expected size and checksum, otherwise deletes the
// file (if any) and removes the asset with a quota refund.
describe('removeStaleUpload', () => {
  const assetId = factory.uuid();
  const path = `/upload/${assetId}/file.jpg`;
  it('should skip if asset not found', async () => {
    // eslint-disable-next-line unicorn/no-useless-undefined
    mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue(undefined);
    const result = await sut.removeStaleUpload({ id: assetId });
    expect(result).toBe(JobStatus.Skipped);
    expect(mocks.storage.stat).not.toHaveBeenCalled();
  });
  it('should complete asset if file matches expected state', async () => {
    // Size and checksum both match → the upload actually finished; salvage it.
    const checksum = Buffer.from('checksum');
    const fileModifiedAt = new Date();
    mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
      path,
      checksum,
      fileModifiedAt,
      size: 1024,
    });
    mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
    mocks.crypto.hashFile.mockResolvedValue(checksum);
    const result = await sut.removeStaleUpload({ id: assetId });
    expect(result).toBe(JobStatus.Success);
    expect(mocks.asset.setComplete).toHaveBeenCalledWith(assetId);
    expect(mocks.storage.unlink).not.toHaveBeenCalled();
  });
  it('should cancel asset if file size does not match', async () => {
    mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
      path,
      checksum: Buffer.from('checksum'),
      fileModifiedAt: new Date(),
      size: 1024,
    });
    mocks.storage.stat.mockResolvedValue({ size: 512 } as any);
    const result = await sut.removeStaleUpload({ id: assetId });
    expect(result).toBe(JobStatus.Success);
    expect(mocks.storage.unlink).toHaveBeenCalledWith(path);
    expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
  });
  it('should cancel asset if checksum does not match', async () => {
    mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
      path,
      checksum: Buffer.from('expected-checksum'),
      fileModifiedAt: new Date(),
      size: 1024,
    });
    mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
    mocks.crypto.hashFile.mockResolvedValue(Buffer.from('actual-checksum'));
    const result = await sut.removeStaleUpload({ id: assetId });
    expect(result).toBe(JobStatus.Success);
    expect(mocks.storage.unlink).toHaveBeenCalledWith(path);
    expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
  });
  it('should cancel asset if file does not exist', async () => {
    mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
      path,
      checksum: Buffer.from('checksum'),
      fileModifiedAt: new Date(),
      size: 1024,
    });
    const error = new Error('File not found') as NodeJS.ErrnoException;
    error.code = 'ENOENT';
    mocks.storage.stat.mockRejectedValue(error);
    const result = await sut.removeStaleUpload({ id: assetId });
    expect(result).toBe(JobStatus.Success);
    expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
  });
  it('should cancel asset if stat fails with permission error', async () => {
    // Any stat failure — not just ENOENT — results in removal.
    mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
      path,
      checksum: Buffer.from('checksum'),
      fileModifiedAt: new Date(),
      size: 1024,
    });
    const error = new Error('Permission denied') as NodeJS.ErrnoException;
    error.code = 'EACCES';
    mocks.storage.stat.mockRejectedValue(error);
    const result = await sut.removeStaleUpload({ id: assetId });
    expect(result).toBe(JobStatus.Success);
    expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
  });
});
});

View File

@@ -0,0 +1,454 @@
import { BadRequestException, Injectable, InternalServerErrorException } from '@nestjs/common';
import { Response } from 'express';
import { DateTime } from 'luxon';
import { createHash } from 'node:crypto';
import { dirname, extname, join } from 'node:path';
import { Readable, Writable } from 'node:stream';
import { SystemConfig } from 'src/config';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto } from 'src/dtos/asset-upload.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
AssetMetadataKey,
AssetStatus,
AssetType,
AssetVisibility,
ImmichWorker,
JobName,
JobStatus,
QueueName,
StorageFolder,
} from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { mimeTypes } from 'src/utils/mime-types';
import { withRetry } from 'src/utils/misc';
// Highest draft interop version of "Resumable Uploads for HTTP" (RUFH) this server speaks.
export const MAX_RUFH_INTEROP_VERSION = 8;

/**
 * Handles conventional and resumable (RUFH draft) asset uploads: creating the
 * partial asset record, streaming request bodies to disk under a per-asset
 * UUID lock, verifying checksums on completion, and cleaning up stale
 * partial uploads via background jobs.
 */
@Injectable()
export class AssetUploadService extends BaseService {
  // This is used to proactively abort previous requests for the same asset
  // when a new one arrives. The previous request still holds the asset lock
  // and will prevent the new request from proceeding until the previous one
  // times out. As normal client behavior will not have concurrent requests,
  // we can assume the previous request has already failed on the client end.
  private activeRequests = new Map<string, { req: Readable; startTime: Date }>();

  /**
   * Destroys this instance's tracked in-flight request for the asset, if any.
   * Fired locally and across API workers via the server event bus.
   *
   * @returns true when this instance was tracking a request for the asset
   * (used by abortExistingRequest to avoid re-broadcasting its own abort).
   */
  @OnEvent({ name: 'UploadAbort', workers: [ImmichWorker.Api], server: true })
  onUploadAbort({ assetId, abortTime }: ArgOf<'UploadAbort'>) {
    const entry = this.activeRequests.get(assetId);
    if (!entry) {
      return false;
    }
    // only destroy requests that started before the abort was issued
    if (abortTime > entry.startTime) {
      entry.req.destroy();
      this.activeRequests.delete(assetId);
    }
    return true;
  }

  /**
   * Handles the initial upload request for both conventional single-request
   * uploads and resumable uploads. Creates the partial asset record, streams
   * the body to disk and, when the upload is declared complete, verifies the
   * checksum before finalizing the asset.
   */
  async startUpload(auth: AuthDto, req: Readable, res: Response, dto: StartUploadDto): Promise<void> {
    this.logger.verboseFn(() => `Starting upload: ${JSON.stringify(dto)}`);
    const { uploadComplete, assetData, uploadLength, contentLength, version } = dto;
    // a conventional upload (no Upload-Complete header) is implicitly complete
    const isComplete = uploadComplete !== false;
    const isResumable = version && uploadComplete !== undefined;
    const { backup } = await this.getConfig({ withCache: true });
    const asset = await this.onStart(auth, dto);
    if (asset.isDuplicate) {
      if (asset.status !== AssetStatus.Partial) {
        return this.sendAlreadyCompleted(res);
      }
      const location = `/api/upload/${asset.id}`;
      if (isResumable) {
        this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
        // this is a 5xx to indicate the client should do offset retrieval and resume
        res.status(500).send('Incomplete asset already exists');
        return;
      }
    }
    // a complete upload must send its entire declared length in this request
    if (isComplete && uploadLength !== contentLength) {
      return this.sendInconsistentLength(res);
    }
    const location = `/api/upload/${asset.id}`;
    if (isResumable) {
      this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
    }
    this.addRequest(asset.id, req);
    await this.databaseRepository.withUuidLock(asset.id, async () => {
      // conventional upload, check status again with lock acquired before overwriting
      if (asset.isDuplicate) {
        const existingAsset = await this.assetRepository.getCompletionMetadata(asset.id, auth.user.id);
        if (existingAsset?.status !== AssetStatus.Partial) {
          return this.sendAlreadyCompleted(res);
        }
      }
      await this.storageRepository.mkdir(dirname(asset.path));
      let checksumBuffer: Buffer | undefined;
      const writeStream = asset.isDuplicate
        ? this.storageRepository.createWriteStream(asset.path)
        : this.storageRepository.createOrAppendWriteStream(asset.path);
      this.pipe(req, writeStream, contentLength);
      if (isComplete) {
        // hash while streaming so a complete upload can be verified without re-reading the file
        const hash = createHash('sha1');
        req.on('data', (data: Buffer) => hash.update(data));
        writeStream.on('finish', () => (checksumBuffer = hash.digest()));
      }
      await new Promise((resolve, reject) => writeStream.on('close', resolve).on('error', reject));
      if (isResumable) {
        this.setCompleteHeader(res, version, uploadComplete);
      }
      if (!isComplete) {
        res.status(201).set('Location', location).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
        return;
      }
      if (dto.checksum.compare(checksumBuffer!) !== 0) {
        return await this.sendChecksumMismatch(res, asset.id, asset.path);
      }
      await this.onComplete({ id: asset.id, path: asset.path, fileModifiedAt: assetData.fileModifiedAt });
      res.status(200).send({ id: asset.id });
    });
  }

  /**
   * Appends a chunk to an existing partial upload at the declared offset.
   * With upload-complete set, the full file checksum is verified and the
   * asset finalized; otherwise the new offset is returned for further appends.
   */
  resumeUpload(auth: AuthDto, req: Readable, res: Response, id: string, dto: ResumeUploadDto): Promise<void> {
    this.logger.verboseFn(() => `Resuming upload for ${id}: ${JSON.stringify(dto)}`);
    const { uploadComplete, uploadLength, uploadOffset, contentLength, version } = dto;
    this.setCompleteHeader(res, version, false);
    this.addRequest(id, req);
    return this.databaseRepository.withUuidLock(id, async () => {
      const completionData = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
      if (!completionData) {
        res.status(404).send('Asset not found');
        return;
      }
      const { fileModifiedAt, path, status, checksum: providedChecksum, size } = completionData;
      if (status !== AssetStatus.Partial) {
        return this.sendAlreadyCompleted(res);
      }
      // the declared total length must not change across requests
      if (uploadLength && size && size !== uploadLength) {
        return this.sendInconsistentLength(res);
      }
      const expectedOffset = await this.getCurrentOffset(path);
      if (expectedOffset !== uploadOffset) {
        return this.sendOffsetMismatch(res, expectedOffset, uploadOffset);
      }
      const newLength = uploadOffset + contentLength;
      if (uploadLength !== undefined && newLength > uploadLength) {
        res.status(400).send('Upload would exceed declared length');
        return;
      }
      // empty non-final chunk: nothing to write, just report the current offset
      if (contentLength === 0 && !uploadComplete) {
        res.status(204).setHeader('Upload-Offset', expectedOffset.toString()).send();
        return;
      }
      const writeStream = this.storageRepository.createOrAppendWriteStream(path);
      this.pipe(req, writeStream, contentLength);
      await new Promise((resolve, reject) => writeStream.on('close', resolve).on('error', reject));
      this.setCompleteHeader(res, version, uploadComplete);
      if (!uploadComplete) {
        try {
          const offset = await this.getCurrentOffset(path);
          res.status(204).setHeader('Upload-Offset', offset.toString()).send();
        } catch {
          this.logger.error(`Failed to get current offset for ${path} after write`);
          res.status(500).send();
        }
        return;
      }
      const checksum = await this.cryptoRepository.hashFile(path);
      if (providedChecksum.compare(checksum) !== 0) {
        return await this.sendChecksumMismatch(res, id, path);
      }
      await this.onComplete({ id, path, fileModifiedAt });
      res.status(200).send({ id });
    });
  }

  /**
   * Cancels a partial upload: aborts any in-flight request for it, removes the
   * file on disk, and deletes the asset record (restoring quota).
   */
  cancelUpload(auth: AuthDto, assetId: string, res: Response): Promise<void> {
    this.abortExistingRequest(assetId);
    return this.databaseRepository.withUuidLock(assetId, async () => {
      const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
      if (!asset) {
        res.status(404).send('Asset not found');
        return;
      }
      if (asset.status !== AssetStatus.Partial) {
        return this.sendAlreadyCompleted(res);
      }
      await this.onCancel(assetId, asset.path);
      res.status(204).send();
    });
  }

  /**
   * Offset-retrieval endpoint: reports the current byte offset and completion
   * state so a client can resume an interrupted upload.
   */
  async getUploadStatus(auth: AuthDto, res: Response, id: string, { version }: GetUploadStatusDto): Promise<void> {
    this.logger.verboseFn(() => `Getting upload status for ${id} with version ${version}`);
    const { backup } = await this.getConfig({ withCache: true });
    // any in-flight request would hold the lock and stale-lock the offset read
    this.abortExistingRequest(id);
    return this.databaseRepository.withUuidLock(id, async () => {
      const asset = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
      if (!asset) {
        res.status(404).send('Asset not found');
        return;
      }
      const offset = await this.getCurrentOffset(asset.path);
      this.setCompleteHeader(res, version, asset.status !== AssetStatus.Partial);
      res
        .status(204)
        .setHeader('Upload-Offset', offset.toString())
        .setHeader('Cache-Control', 'no-store')
        .setHeader('Upload-Limit', this.getUploadLimits(backup))
        .send();
    });
  }

  /** OPTIONS handler advertising the server's upload limits. */
  async getUploadOptions(res: Response): Promise<void> {
    const { backup } = await this.getConfig({ withCache: true });
    res.status(204).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
  }

  /**
   * Queues a cleanup job for every partial asset older than the configured
   * max age, batching to avoid unbounded job arrays.
   */
  @OnJob({ name: JobName.PartialAssetCleanupQueueAll, queue: QueueName.BackgroundTask })
  async removeStaleUploads(): Promise<void> {
    const config = await this.getConfig({ withCache: false });
    const createdBefore = DateTime.now().minus({ hours: config.backup.upload.maxAgeHours }).toJSDate();
    let jobs: JobItem[] = [];
    const assets = this.assetJobRepository.streamForPartialAssetCleanupJob(createdBefore);
    for await (const asset of assets) {
      jobs.push({ name: JobName.PartialAssetCleanup, data: asset });
      if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
        await this.jobRepository.queueAll(jobs);
        jobs = [];
      }
    }
    await this.jobRepository.queueAll(jobs);
  }

  /**
   * Cleans up a single stale partial upload. If the file on disk already
   * matches the declared size and checksum, the upload is finalized instead
   * of removed; otherwise (including any stat/hash failure) it is cancelled.
   */
  @OnJob({ name: JobName.PartialAssetCleanup, queue: QueueName.BackgroundTask })
  removeStaleUpload({ id }: JobOf<JobName.PartialAssetCleanup>): Promise<JobStatus> {
    return this.databaseRepository.withUuidLock(id, async () => {
      const asset = await this.assetJobRepository.getForPartialAssetCleanupJob(id);
      if (!asset) {
        return JobStatus.Skipped;
      }
      const { checksum, fileModifiedAt, path, size } = asset;
      try {
        const stat = await this.storageRepository.stat(path);
        // Buffers must be compared by content; `===` would compare references and never match
        if (size === stat.size && checksum.equals(await this.cryptoRepository.hashFile(path))) {
          await this.onComplete({ id, path, fileModifiedAt });
          return JobStatus.Success;
        }
      } catch (error: any) {
        this.logger.debugFn(() => `Failed to check upload file ${path}: ${error.message}`);
      }
      await this.onCancel(id, path);
      return JobStatus.Success;
    });
  }

  /**
   * Validates the upload and creates the partial asset record. When the
   * checksum unique constraint fires, resolves and returns the existing
   * asset instead, flagged as a duplicate.
   */
  async onStart(
    auth: AuthDto,
    { assetData, checksum, uploadLength }: StartUploadDto,
  ): Promise<{ id: string; path: string; status: AssetStatus; isDuplicate: boolean }> {
    const assetId = this.cryptoRepository.randomUUID();
    const folder = StorageCore.getNestedFolder(StorageFolder.Upload, auth.user.id, assetId);
    const extension = extname(assetData.filename);
    const path = join(folder, `${assetId}${extension}`);
    const type = mimeTypes.assetType(path);
    if (type === AssetType.Other) {
      throw new BadRequestException(`${assetData.filename} is an unsupported file type`);
    }
    this.validateQuota(auth, uploadLength);
    try {
      await this.assetRepository.createWithMetadata(
        {
          id: assetId,
          ownerId: auth.user.id,
          libraryId: null,
          checksum,
          originalPath: path,
          deviceAssetId: assetData.deviceAssetId,
          deviceId: assetData.deviceId,
          fileCreatedAt: assetData.fileCreatedAt,
          fileModifiedAt: assetData.fileModifiedAt,
          localDateTime: assetData.fileCreatedAt,
          type,
          isFavorite: assetData.isFavorite,
          livePhotoVideoId: assetData.livePhotoVideoId,
          visibility: AssetVisibility.Hidden,
          originalFileName: assetData.filename,
          status: AssetStatus.Partial,
        },
        uploadLength,
        assetData.iCloudId ? [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: assetData.iCloudId } }] : undefined,
      );
    } catch (error: any) {
      if (!isAssetChecksumConstraint(error)) {
        this.logger.error(`Error creating upload asset record: ${error.message}`);
        throw new InternalServerErrorException('Error creating asset');
      }
      const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, checksum);
      if (!duplicate) {
        throw new InternalServerErrorException('Error locating duplicate for checksum constraint');
      }
      return { id: duplicate.id, path, status: duplicate.status, isDuplicate: true };
    }
    return { id: assetId, path, status: AssetStatus.Partial, isDuplicate: false };
  }

  /**
   * Finalizes an upload: marks the asset complete, restores the file's
   * modification time (best effort), and queues metadata extraction.
   */
  async onComplete({ id, path, fileModifiedAt }: { id: string; path: string; fileModifiedAt: Date }) {
    this.logger.log('Completing upload for asset', id);
    const jobData = { name: JobName.AssetExtractMetadata, data: { id, source: 'upload' } } as const;
    await withRetry(() => this.assetRepository.setComplete(id));
    try {
      await withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
    } catch (error: any) {
      this.logger.error(`Failed to update times for ${path}: ${error.message}`);
    }
    await withRetry(() => this.jobRepository.queue(jobData));
  }

  /** Removes the partial file and the asset record, restoring the owner's quota. */
  async onCancel(assetId: string, path: string): Promise<void> {
    this.logger.log('Cancelling upload for asset', assetId);
    await withRetry(() => this.storageRepository.unlink(path));
    await withRetry(() => this.assetRepository.removeAndDecrementQuota(assetId));
  }

  /** Tracks an in-flight request for the asset, aborting any previous one first. */
  private addRequest(assetId: string, req: Readable) {
    const addTime = new Date();
    const activeRequest = { req, startTime: addTime };
    this.abortExistingRequest(assetId, addTime);
    this.activeRequests.set(assetId, activeRequest);
    req.on('close', () => {
      // only clear the entry if it still refers to this request
      if (this.activeRequests.get(assetId)?.req === req) {
        this.activeRequests.delete(assetId);
      }
    });
  }

  /** Aborts an existing request locally, broadcasting to other workers only when not handled here. */
  private abortExistingRequest(assetId: string, abortTime = new Date()) {
    const abortEvent = { assetId, abortTime };
    // only emit if we didn't just abort it ourselves
    if (!this.onUploadAbort(abortEvent)) {
      this.eventRepository.serverSend('UploadAbort', abortEvent);
    }
  }

  /**
   * Manually pipes the request into the write stream with backpressure,
   * surfacing an error on the write stream when the request closes before
   * the expected number of bytes arrived.
   */
  private pipe(req: Readable, writeStream: Writable, size: number) {
    let receivedLength = 0;
    req.on('data', (data: Buffer) => {
      receivedLength += data.length;
      if (!writeStream.write(data)) {
        // respect backpressure: pause until the write buffer drains
        req.pause();
        writeStream.once('drain', () => req.resume());
      }
    });
    req.on('close', () => {
      if (receivedLength < size) {
        writeStream.emit('error', new Error('Request closed before all data received'));
      }
      writeStream.end();
    });
  }

  /** Sends a 104 (Upload Resumption Supported) interim response. */
  private sendInterimResponse({ socket }: Response, location: string, interopVersion: number, limits: string): void {
    if (socket && !socket.destroyed) {
      // Express doesn't understand interim responses, so write directly to socket
      socket.write(
        'HTTP/1.1 104 Upload Resumption Supported\r\n' +
          `Location: ${location}\r\n` +
          `Upload-Limit: ${limits}\r\n` +
          `Upload-Draft-Interop-Version: ${interopVersion}\r\n\r\n`,
      );
    }
  }

  /** RUFH problem response: declared upload length conflicts with prior/request values. */
  private sendInconsistentLength(res: Response): void {
    res.status(400).contentType('application/problem+json').send({
      type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length',
      title: 'inconsistent length values for upload',
    });
  }

  /** RUFH problem response: the upload was already finalized. */
  private sendAlreadyCompleted(res: Response): void {
    res.status(400).contentType('application/problem+json').send({
      type: 'https://iana.org/assignments/http-problem-types#completed-upload',
      title: 'upload is already completed',
    });
  }

  /** RUFH problem response: client offset does not match the server's file size. */
  private sendOffsetMismatch(res: Response, expected: number, actual: number): void {
    res.status(409).contentType('application/problem+json').setHeader('Upload-Offset', expected.toString()).send({
      type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
      title: 'offset from request does not match offset of resource',
      'expected-offset': expected,
      'provided-offset': actual,
    });
  }

  /** Rejects the upload (custom 460) and removes the mismatched asset. */
  private sendChecksumMismatch(res: Response, assetId: string, path: string) {
    this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`);
    res.status(460).send('File on server does not match provided checksum');
    return this.onCancel(assetId, path);
  }

  /**
   * Throws when accepting `size` more bytes would exceed the user's quota.
   * A null quota means unlimited.
   */
  private validateQuota(auth: AuthDto, size: number): void {
    const { quotaSizeInBytes: quotaLimit, quotaUsageInBytes: currentUsage } = auth.user;
    if (quotaLimit === null) {
      return;
    }
    if (quotaLimit < currentUsage + size) {
      throw new BadRequestException('Quota has been exceeded!');
    }
  }

  /** Current upload offset = size of the partial file; a missing file counts as offset 0. */
  private async getCurrentOffset(path: string): Promise<number> {
    try {
      const stat = await this.storageRepository.stat(path);
      return stat.size;
    } catch (error: any) {
      if ((error as NodeJS.ErrnoException)?.code === 'ENOENT') {
        return 0;
      }
      throw error;
    }
  }

  /** Sets the completion header; interop versions <= 3 used Upload-Incomplete instead. */
  private setCompleteHeader(res: Response, interopVersion: number | undefined, isComplete: boolean): void {
    if (interopVersion === undefined || interopVersion > 3) {
      res.setHeader('Upload-Complete', isComplete ? '?1' : '?0');
    } else {
      res.setHeader('Upload-Incomplete', isComplete ? '?0' : '?1');
    }
  }

  /** Builds the Upload-Limit structured-field value from the backup config. */
  private getUploadLimits({ upload }: SystemConfig['backup']) {
    return `min-size=1, max-age=${upload.maxAgeHours * 3600}`;
  }
}

View File

@@ -3,6 +3,7 @@ import { AlbumService } from 'src/services/album.service';
import { ApiKeyService } from 'src/services/api-key.service';
import { ApiService } from 'src/services/api.service';
import { AssetMediaService } from 'src/services/asset-media.service';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { AssetService } from 'src/services/asset.service';
import { AuditService } from 'src/services/audit.service';
import { AuthAdminService } from 'src/services/auth-admin.service';
@@ -47,6 +48,7 @@ export const services = [
AlbumService,
ApiService,
AssetMediaService,
AssetUploadService,
AssetService,
AuditService,
AuthService,

View File

@@ -48,6 +48,7 @@ describe(JobService.name, () => {
{ name: JobName.UserSyncUsage },
{ name: JobName.AssetGenerateThumbnailsQueueAll, data: { force: false } },
{ name: JobName.FacialRecognitionQueueAll, data: { force: false, nightly: true } },
{ name: JobName.PartialAssetCleanupQueueAll },
]);
});
});

View File

@@ -302,6 +302,10 @@ export class JobService extends BaseService {
jobs.push({ name: JobName.FacialRecognitionQueueAll, data: { force: false, nightly: true } });
}
if (config.nightlyTasks.removeStaleUploads) {
jobs.push({ name: JobName.PartialAssetCleanupQueueAll });
}
await this.jobRepository.queueAll(jobs);
}

View File

@@ -46,6 +46,9 @@ const updatedConfig = Object.freeze<SystemConfig>({
cronExpression: '0 02 * * *',
keepLastAmount: 14,
},
upload: {
maxAgeHours: 72,
},
},
ffmpeg: {
crf: 30,
@@ -115,6 +118,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
missingThumbnails: true,
generateMemories: true,
syncQuotaUsage: true,
removeStaleUploads: true,
},
reverseGeocoding: {
enabled: true,

View File

@@ -352,6 +352,8 @@ export type JobItem =
| { name: JobName.PersonCleanup; data?: IBaseJob }
| { name: JobName.AssetDelete; data: IAssetDeleteJob }
| { name: JobName.AssetDeleteCheck; data?: IBaseJob }
| { name: JobName.PartialAssetCleanup; data: IEntityJob }
| { name: JobName.PartialAssetCleanupQueueAll; data?: IBaseJob }
// Library Management
| { name: JobName.LibrarySyncFiles; data: ILibraryFileJob }

View File

@@ -99,7 +99,7 @@ export const getKyselyConfig = (
}),
}),
log(event) {
if (event.level === 'error') {
if (event.level === 'error' && (event.error as PostgresError).constraint_name !== ASSET_CHECKSUM_CONSTRAINT) {
console.error('Query failed :', {
durationMs: event.queryDurationMillis,
error: event.error,

View File

@@ -14,6 +14,7 @@ import {
import _ from 'lodash';
import { writeFileSync } from 'node:fs';
import path from 'node:path';
import { setTimeout } from 'node:timers/promises';
import picomatch from 'picomatch';
import parse from 'picomatch/lib/parse';
import { SystemConfig } from 'src/config';
@@ -324,3 +325,18 @@ export const globToSqlPattern = (glob: string) => {
/**
 * Restricts a number to the inclusive range [min, max].
 */
export function clamp(value: number, min: number, max: number) {
  // cap from above first, then enforce the lower bound
  const cappedAbove = Math.min(max, value);
  return Math.max(min, cappedAbove);
}
/**
 * Executes an async operation, retrying on failure.
 *
 * @param operation the async work to perform
 * @param retries number of additional attempts after the first failure
 * @param delay milliseconds to wait between attempts
 * @returns the first successful result
 * @throws the error from the final failed attempt
 */
export async function withRetry<T>(operation: () => Promise<T>, retries: number = 2, delay: number = 100): Promise<T> {
  const totalAttempts = retries + 1;
  let lastError: any;
  for (let attempt = 1; attempt <= totalAttempts; attempt++) {
    try {
      return await operation();
    } catch (error: any) {
      lastError = error;
      // sleep only when another attempt will follow
      if (attempt < totalAttempts) {
        await setTimeout(delay);
      }
    }
  }
  throw lastError;
}

View File

@@ -1,5 +1,35 @@
import { BadRequestException } from '@nestjs/common';
import { plainToInstance } from 'class-transformer';
import { validateSync } from 'class-validator';
/**
 * Decodes a checksum string into a Buffer. A 28-character string is the
 * base64 form of a 20-byte SHA-1 digest; anything else is treated as hex.
 */
export const fromChecksum = (checksum: string): Buffer => {
  const encoding = checksum.length === 28 ? 'base64' : 'hex';
  return Buffer.from(checksum, encoding);
};
/** Returns the first element when given an array; otherwise returns the value itself. */
export const fromMaybeArray = <T>(param: T | T[]) => {
  if (Array.isArray(param)) {
    return param[0];
  }
  return param;
};
/**
 * Transforms a plain object into an instance of the given class and validates
 * it synchronously. Returns the instance on success; otherwise throws a
 * BadRequestException listing every violated constraint message, including
 * those of first-level child validation errors.
 */
export function validateSyncOrReject<T extends object>(cls: new () => T, obj: any): T {
  const dto = plainToInstance(cls, obj, { excludeExtraneousValues: true });
  const errors = validateSync(dto);
  if (errors.length === 0) {
    return dto;
  }
  // flatten each error's own constraint messages followed by its children's
  const constraints = errors.flatMap((error) => {
    const own = Object.values(error.constraints ?? {});
    const nested = (error.children ?? []).flatMap((child) => Object.values(child.constraints ?? {}));
    return [...own, ...nested];
  });
  throw new BadRequestException(constraints);
}

View File

@@ -45,5 +45,9 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
upsertMetadata: vitest.fn(),
getMetadataByKey: vitest.fn(),
deleteMetadataByKey: vitest.fn(),
getCompletionMetadata: vitest.fn(),
createWithMetadata: vitest.fn(),
removeAndDecrementQuota: vitest.fn(),
setComplete: vitest.fn(),
};
};

View File

@@ -20,6 +20,7 @@ export const newDatabaseRepositoryMock = (): Mocked<RepositoryInterface<Database
prewarm: vitest.fn(),
runMigrations: vitest.fn(),
withLock: vitest.fn().mockImplementation((_, function_: <R>() => Promise<R>) => function_()),
withUuidLock: vitest.fn().mockImplementation((_, function_: <R>() => Promise<R>) => function_()),
tryLock: vitest.fn(),
isBusy: vitest.fn(),
wait: vitest.fn(),

View File

@@ -51,6 +51,7 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
readFile: vitest.fn(),
createFile: vitest.fn(),
createWriteStream: vitest.fn(),
createOrAppendWriteStream: vitest.fn(),
createOrOverwriteFile: vitest.fn(),
existsSync: vitest.fn(),
overwriteFile: vitest.fn(),
@@ -58,6 +59,7 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
unlinkDir: vitest.fn().mockResolvedValue(true),
removeEmptyDirs: vitest.fn(),
checkFileExists: vitest.fn(),
mkdir: vitest.fn(),
mkdirSync: vitest.fn(),
checkDiskUsage: vitest.fn(),
readdir: vitest.fn(),

View File

@@ -44,7 +44,7 @@
"geo-coordinates-parser": "^1.7.4",
"geojson": "^0.5.0",
"handlebars": "^4.7.8",
"happy-dom": "^20.0.0",
"happy-dom": "^18.0.1",
"intl-messageformat": "^10.7.11",
"justified-layout": "^4.1.0",
"lodash-es": "^4.17.21",
@@ -89,7 +89,7 @@
"eslint-plugin-unicorn": "^61.0.2",
"factory.ts": "^1.4.1",
"globals": "^16.0.0",
"happy-dom": "^20.0.0",
"happy-dom": "^18.0.1",
"prettier": "^3.4.2",
"prettier-plugin-organize-imports": "^4.0.0",
"prettier-plugin-sort-json": "^4.1.1",

View File

@@ -134,12 +134,26 @@
element.scrollTop = top;
}
};
// Scrolls the timeline container by a relative vertical offset, when mounted.
const scrollBy = (y: number) => {
  element?.scrollBy(0, y);
};

// Jumps back to the very top of the timeline.
const scrollToTop = () => {
  scrollTo(0);
};
const getAssetHeight = (assetId: string, monthGroup: MonthGroup) => monthGroup.findAssetAbsolutePosition(assetId);
// Resolves the absolute position of an asset within its month group.
const getAssetHeight = (assetId: string, monthGroup: MonthGroup) => {
// findAssetAbsolutePosition may trigger layouts, so any scroll
// compensation it sets must be applied (and cleared) before returning
const height = monthGroup!.findAssetAbsolutePosition(assetId);
while (timelineManager.scrollCompensation.monthGroup) {
handleScrollCompensation(timelineManager.scrollCompensation);
timelineManager.clearScrollCompensation();
}
return height;
};
const assetIsVisible = (assetTop: number): boolean => {
if (!element) {
@@ -232,6 +246,19 @@
// note: don't throttle, debounce, or otherwise make this function async - it causes flicker
const updateSlidingWindow = () => timelineManager.updateSlidingWindow(element?.scrollTop || 0);
// Applies a pending scroll compensation: either nudge the viewport by a
// height delta, or jump to an absolute scrollTop, then refresh the window.
const handleScrollCompensation = ({ heightDelta, scrollTop }: { heightDelta?: number; scrollTop?: number }) => {
if (heightDelta !== undefined) {
scrollBy(heightDelta);
} else if (scrollTop !== undefined) {
scrollTo(scrollTop);
}
// Yes, updateSlidingWindow() is called by the onScroll event triggered as a result of
// the above calls. However, this delay is enough time to set the intersecting property
// of the monthGroup to false, then true, which causes the DOM nodes to be recreated,
// causing bad perf, and also, disrupting focus of those elements.
updateSlidingWindow();
};
const topSectionResizeObserver: OnResizeCallback = ({ height }) => (timelineManager.topSectionHeight = height);
onMount(() => {
@@ -639,6 +666,7 @@
onSelect={({ title, assets }) => handleGroupSelect(timelineManager, title, assets)}
onSelectAssetCandidates={handleSelectAssetCandidates}
onSelectAssets={handleSelectAssets}
onScrollCompensation={handleScrollCompensation}
{customLayout}
{onThumbnailClick}
/>

View File

@@ -33,6 +33,7 @@
onSelect: ({ title, assets }: { title: string; assets: TimelineAsset[] }) => void;
onSelectAssets: (asset: TimelineAsset) => void;
onSelectAssetCandidates: (asset: TimelineAsset | null) => void;
onScrollCompensation: (compensation: { heightDelta?: number; scrollTop?: number }) => void;
onThumbnailClick?: (
asset: TimelineAsset,
timelineManager: TimelineManager,
@@ -58,7 +59,7 @@
onSelect,
onSelectAssets,
onSelectAssetCandidates,
onScrollCompensation,
onThumbnailClick,
}: Props = $props();
@@ -133,6 +134,13 @@
});
return getDateLocaleString(date);
};
$effect.root(() => {
if (timelineManager.scrollCompensation.monthGroup === monthGroup) {
onScrollCompensation(timelineManager.scrollCompensation);
timelineManager.clearScrollCompensation();
}
});
</script>
{#each filterIntersecting(monthGroup.dayGroups) as dayGroup, groupIndex (dayGroup.day)}

View File

@@ -55,7 +55,7 @@ export function calculateMonthGroupIntersecting(
}
/**
* Calculate intersection for viewer assets with additional parameters like header height
* Calculate intersection for viewer assets with additional parameters like header height and scroll compensation
*/
export function calculateViewerAssetIntersecting(
timelineManager: TimelineManager,
@@ -64,8 +64,12 @@ export function calculateViewerAssetIntersecting(
expandTop: number = INTERSECTION_EXPAND_TOP,
expandBottom: number = INTERSECTION_EXPAND_BOTTOM,
) {
const topWindow = timelineManager.visibleWindow.top - timelineManager.headerHeight - expandTop;
const bottomWindow = timelineManager.visibleWindow.bottom + timelineManager.headerHeight + expandBottom;
const scrollCompensationHeightDelta = timelineManager.scrollCompensation?.heightDelta ?? 0;
const topWindow =
timelineManager.visibleWindow.top - timelineManager.headerHeight - expandTop + scrollCompensationHeightDelta;
const bottomWindow =
timelineManager.visibleWindow.bottom + timelineManager.headerHeight + expandBottom + scrollCompensationHeightDelta;
const positionBottom = positionTop + positionHeight;

View File

@@ -36,6 +36,7 @@ export class MonthGroup {
#initialCount: number = 0;
#sortOrder: AssetOrder = AssetOrder.Desc;
percent: number = $state(0);
assetsCount: number = $derived(
this.isLoaded
@@ -241,31 +242,42 @@ export class MonthGroup {
if (this.#height === height) {
return;
}
let needsIntersectionUpdate = false;
const timelineManager = this.timelineManager;
const index = timelineManager.months.indexOf(this);
const { timelineManager: store, percent } = this;
const index = store.months.indexOf(this);
const heightDelta = height - this.#height;
this.#height = height;
const prevMonthGroup = timelineManager.months[index - 1];
const prevMonthGroup = store.months[index - 1];
if (prevMonthGroup) {
const newTop = prevMonthGroup.#top + prevMonthGroup.#height;
if (this.#top !== newTop) {
this.#top = newTop;
}
}
if (heightDelta === 0) {
return;
}
for (let cursor = index + 1; cursor < timelineManager.months.length; cursor++) {
for (let cursor = index + 1; cursor < store.months.length; cursor++) {
const monthGroup = this.timelineManager.months[cursor];
const newTop = monthGroup.#top + heightDelta;
if (monthGroup.#top !== newTop) {
monthGroup.#top = newTop;
needsIntersectionUpdate = true;
}
}
if (needsIntersectionUpdate) {
timelineManager.updateIntersections();
if (store.topIntersectingMonthGroup) {
const currentIndex = store.months.indexOf(store.topIntersectingMonthGroup);
if (currentIndex > 0) {
if (index < currentIndex) {
store.scrollCompensation = {
heightDelta,
scrollTop: undefined,
monthGroup: this,
};
} else if (percent > 0) {
const top = this.top + height * percent;
store.scrollCompensation = {
heightDelta: undefined,
scrollTop: top,
monthGroup: this,
};
}
}
}
}

View File

@@ -68,7 +68,7 @@ describe('TimelineManager', () => {
it('should load months in viewport', () => {
expect(sdkMock.getTimeBuckets).toBeCalledTimes(1);
expect(sdkMock.getTimeBucket).toHaveBeenCalledTimes(3);
expect(sdkMock.getTimeBucket).toHaveBeenCalledTimes(2);
});
it('calculates month height', () => {
@@ -82,13 +82,13 @@ describe('TimelineManager', () => {
expect.arrayContaining([
expect.objectContaining({ year: 2024, month: 3, height: 165.5 }),
expect.objectContaining({ year: 2024, month: 2, height: 11_996 }),
expect.objectContaining({ year: 2024, month: 1, height: 48 }),
expect.objectContaining({ year: 2024, month: 1, height: 286 }),
]),
);
});
it('calculates timeline height', () => {
expect(timelineManager.timelineHeight).toBe(12_209.5);
expect(timelineManager.timelineHeight).toBe(12_447.5);
});
});

View File

@@ -5,7 +5,7 @@ import { authManager } from '$lib/managers/auth-manager.svelte';
import { CancellableTask } from '$lib/utils/cancellable-task';
import { toTimelineAsset, type TimelineDateTime, type TimelineYearMonth } from '$lib/utils/timeline-util';
import { debounce, isEqual } from 'lodash-es';
import { clamp, debounce, isEqual } from 'lodash-es';
import { SvelteDate, SvelteMap, SvelteSet } from 'svelte/reactivity';
import { updateIntersectionMonthGroup } from '$lib/managers/timeline-manager/internal/intersection-support.svelte';
@@ -49,6 +49,8 @@ export class TimelineManager {
scrubberMonths: ScrubberMonth[] = $state([]);
scrubberTimelineHeight: number = $state(0);
topIntersectingMonthGroup: MonthGroup | undefined = $state();
visibleWindow = $derived.by(() => ({
top: this.#scrollTop,
bottom: this.#scrollTop + this.viewportHeight,
@@ -85,6 +87,15 @@ export class TimelineManager {
#suspendTransitions = $state(false);
#resetScrolling = debounce(() => (this.#scrolling = false), 1000);
#resetSuspendTransitions = debounce(() => (this.suspendTransitions = false), 1000);
scrollCompensation: {
heightDelta: number | undefined;
scrollTop: number | undefined;
monthGroup: MonthGroup | undefined;
} = $state({
heightDelta: 0,
scrollTop: 0,
monthGroup: undefined,
});
constructor() {}
@@ -230,12 +241,38 @@ export class TimelineManager {
}
}
// Resets the pending scroll compensation after it has been applied by the UI.
clearScrollCompensation() {
this.scrollCompensation = {
heightDelta: undefined,
scrollTop: undefined,
monthGroup: undefined,
};
}
// Recomputes which month groups intersect the visible window, tracks the
// top-most intersecting group, and updates each group's scroll progress.
updateIntersections() {
// nothing to do before initialization or when the visible window has zero height
if (!this.isInitialized || this.visibleWindow.bottom === this.visibleWindow.top) {
return;
}
let topIntersectingMonthGroup = undefined;
for (const month of this.months) {
updateIntersectionMonthGroup(this, month);
// remember the first month that is actually intersecting the viewport
if (!topIntersectingMonthGroup && month.actuallyIntersecting) {
topIntersectingMonthGroup = month;
}
}
if (topIntersectingMonthGroup !== undefined && this.topIntersectingMonthGroup !== topIntersectingMonthGroup) {
this.topIntersectingMonthGroup = topIntersectingMonthGroup;
}
for (const month of this.months) {
if (month === this.topIntersectingMonthGroup) {
// fraction of the top group scrolled past the window top, clamped to [0, 1]
this.topIntersectingMonthGroup.percent = clamp(
(this.visibleWindow.top - this.topIntersectingMonthGroup.top) / this.topIntersectingMonthGroup.height,
0,
1,
);
} else {
month.percent = 0;
}
}
}
@@ -364,10 +401,10 @@ export class TimelineManager {
return;
}
const executionStatus = await monthGroup.loader?.execute(async (signal: AbortSignal) => {
const result = await monthGroup.loader?.execute(async (signal: AbortSignal) => {
await loadFromTimeBuckets(this, monthGroup, this.#options, signal);
}, cancelable);
if (executionStatus === 'LOADED') {
if (result === 'LOADED') {
updateIntersectionMonthGroup(this, monthGroup);
}
}