Compare commits

..

15 Commits

Author SHA1 Message Date
bo0tzz
2d5f31751a test: try out new create-workflow-token-action 2025-10-14 16:02:44 +02:00
renovate[bot]
4d41fa08ad fix(deps): update typescript-projects (#22918)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Daniel Dietzler <mail@ddietzler.dev>
2025-10-14 12:22:13 +02:00
renovate[bot]
6d00930082 chore(deps): update redis:6.2-alpine docker digest to 77697a7 (#22915)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-14 11:16:21 +02:00
renovate[bot]
e4d2c4926c fix(deps): update dependency kysely-postgres-js to v3 (#22924)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-14 11:11:52 +02:00
shenlong
dbee133764 fix: show dialog before delete local action (#22280)
* fix: show dialog on delete local action

# Conflicts:
#	mobile/lib/repositories/asset_media.repository.dart

* button style

---------

Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
Co-authored-by: Alex <alex.tran1502@gmail.com>
2025-10-14 07:20:14 +00:00
Brandon Wees
8473dab684 fix: shared album control permissions (#22435)
* fix: shared album control permissions

* fix: properly display "add photos"

* fix: dont allow modification of album order

* fix: album title/description edit from app bar

* chore: code review changes

* chore: format translations

* chore: lintings
2025-10-14 08:04:22 +05:30
Min Idzelis
146973b072 fix: re-add scroll compensation (efficiently) (#22848)
* fix: re-add scroll compensation (efficient)

* Rename showSkeleton to invisible. Adjust skeleton margins, invisible support.

* Fix faulty logic, simplify

* Calculate ratios and determine compensation strategy: height comp for above/partiality visible, month-scroll comp within a fully visible month.

---------

Co-authored-by: Alex <alex.tran1502@gmail.com>
2025-10-14 02:16:05 +00:00
renovate[bot]
e8ca7f235c chore(deps): update ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0 docker digest to bcf6335 (#22913)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-13 21:14:33 -05:00
renovate[bot]
d411594c84 chore(deps): update ghcr.io/immich-app/postgres:14-vectorchord0.3.0 docker digest to 6f3e9d2 (#22912)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-13 21:12:40 -05:00
shenlong
cf52b879b1 fix: ios skip posting hash response after detached from engine (#22695)
* skip posting message after detached from engine

* review changes

* cancel plugin before destroying engine

---------

Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
Co-authored-by: Alex <alex.tran1502@gmail.com>
2025-10-13 21:09:12 -05:00
luzpaz
46869f664d fix: various typos (#22867)
Found via `codespell -q 3 -S "*.svg,./i18n,./docs/package-lock.json,./readme_i18n,./mobile/assets/i18n" -L afterall,devlop,finaly,inout,nd,optin,renderd,sade`
2025-10-13 15:25:23 +05:30
renovate[bot]
f2b553182a chore(deps): update github-actions (#22793)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-12 12:59:36 +02:00
renovate[bot]
8fe54a4de1 fix(deps): update dependency happy-dom to v20 [security] (#22846)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-11 13:17:25 +02:00
Min Idzelis
ce4e8fa6ba feat: (perf) remove scroll compensation (#22837) 2025-10-10 15:48:29 -04:00
Brandon Wees
efa21af6f9 fix: only cast to device if the asset is a RemoteAsset (#22805) 2025-10-10 11:13:31 -05:00
97 changed files with 907 additions and 5201 deletions

View File

@@ -50,7 +50,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
uses: github/codeql-action/autobuild@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
with:
category: '/language:${{matrix.language}}'

View File

@@ -124,7 +124,7 @@ jobs:
tag-suffix: '-rocm'
platforms: linux/amd64
runner-mapping: '{"linux/amd64": "mich"}'
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
permissions:
contents: read
actions: read
@@ -147,7 +147,7 @@ jobs:
name: Build and Push Server
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
permissions:
contents: read
actions: read

View File

@@ -128,7 +128,7 @@ jobs:
name: release-apk-signed
- name: Create draft release
uses: softprops/action-gh-release@6cbd405e2c4e67a21c47fa9e383d020e4e28b836 # v2.3.3
uses: softprops/action-gh-release@aec2ec56f94eb8180ceec724245f64ef008b89f5 # v2.4.0
with:
draft: true
tag_name: ${{ env.IMMICH_VERSION }}

View File

@@ -41,10 +41,18 @@ jobs:
run:
working-directory: ./mobile
steps:
- name: Generate token
id: token
uses: immich-app/devtools/actions/create-workflow-token@feat/create-workflow-token-action
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
@@ -58,7 +66,7 @@ jobs:
- name: Install DCM
uses: CQLabs/setup-dcm@8697ae0790c0852e964a6ef1d768d62a6675481a # v2.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
github-token: ${{ steps.token.outputs.token }}
version: auto
working-directory: ./mobile

View File

@@ -581,7 +581,7 @@ jobs:
contents: read
services:
postgres:
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:da52bbead5d818adaa8077c8dcdaad0aaf93038c31ad8348b51f9f0ec1310a4d
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:dbf18b3ffea4a81434c65b71e20d27203baf903a0275f4341e4c16dfd901fd67
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres

View File

@@ -140,7 +140,7 @@ services:
database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:41eacbe83eca995561fe43814fd4891e16e39632806253848efaf04d3c8a8b84
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:bcf63357191b76a916ae5eb93464d65c07511da41e3bf7a8416db519b40b1c23
env_file:
- .env
environment:

View File

@@ -63,7 +63,7 @@ services:
database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:41eacbe83eca995561fe43814fd4891e16e39632806253848efaf04d3c8a8b84
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:bcf63357191b76a916ae5eb93464d65c07511da41e3bf7a8416db519b40b1c23
env_file:
- .env
environment:

View File

@@ -56,7 +56,7 @@ services:
database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:41eacbe83eca995561fe43814fd4891e16e39632806253848efaf04d3c8a8b84
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:bcf63357191b76a916ae5eb93464d65c07511da41e3bf7a8416db519b40b1c23
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}

View File

@@ -22,7 +22,7 @@ For organizations seeking to resell Immich, we have established the following gu
- Do not misrepresent your reseller site or services as being officially affiliated with or endorsed by Immich or our development team.
- For small resellers who wish to contribute financially to Immich's development, we recommend directing your customers to purchase licenses directy from us rather than attempting to broker revenue-sharing arrangements. We ask that you refrain from misrepresenting reseller activities as directly supporting our development work.
- For small resellers who wish to contribute financially to Immich's development, we recommend directing your customers to purchase licenses directly from us rather than attempting to broker revenue-sharing arrangements. We ask that you refrain from misrepresenting reseller activities as directly supporting our development work.
When in doubt or if you have an edge case scenario, we encourage you to contact us directly via email to discuss the use of our trademark. We can provide clear guidance on what is acceptable and what is not. You can reach out at: questions@immich.app

View File

@@ -4,7 +4,7 @@ Immich supports the Google's Cast protocol so that photos and videos can be cast
## Enable Google Cast Support
Google Cast support is disabled by default. The web UI uses Google-provided scripts and must retreive them from Google servers when the page loads. This is a privacy concern for some and is thus opt-in.
Google Cast support is disabled by default. The web UI uses Google-provided scripts and must retrieve them from Google servers when the page loads. This is a privacy concern for some and is thus opt-in.
You can enable Google Cast support through `Account Settings > Features > Cast > Google Cast`

View File

@@ -35,10 +35,10 @@ services:
- 2285:2285
redis:
image: redis:6.2-alpine@sha256:2185e741f4c1e7b0ea9ca1e163a3767c4270a73086b6bbea2049a7203212fb7f
image: redis:6.2-alpine@sha256:77697a75da9f94e9357b61fcaf8345f69e3d9d32e9d15032c8415c21263977dc
database:
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:11ced39d65a92a54d12890ced6a26cc2003f92697d6f0d4d944b98459dba7138
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338
command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf
environment:
POSTGRES_PASSWORD: postgres

View File

@@ -53,8 +53,5 @@
},
"volta": {
"node": "22.20.0"
},
"dependencies": {
"structured-headers": "^2.0.2"
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -561,16 +561,6 @@ export const utils = {
await utils.waitForQueueFinish(accessToken, 'sidecar');
await utils.waitForQueueFinish(accessToken, 'metadataExtraction');
},
downloadAsset: async (accessToken: string, id: string) => {
const downloadedRes = await fetch(`${baseUrl}/api/assets/${id}/original`, {
headers: asBearerAuth(accessToken),
});
if (!downloadedRes.ok) {
throw new Error(`Failed to download asset ${id}: ${downloadedRes.status} ${await downloadedRes.text()}`);
}
return await downloadedRes.blob();
},
};
utils.initSdk();

View File

@@ -1039,6 +1039,7 @@
"exif_bottom_sheet_description_error": "Error updating description",
"exif_bottom_sheet_details": "DETAILS",
"exif_bottom_sheet_location": "LOCATION",
"exif_bottom_sheet_no_description": "No description",
"exif_bottom_sheet_people": "PEOPLE",
"exif_bottom_sheet_person_add_person": "Add name",
"exit_slideshow": "Exit Slideshow",

View File

@@ -1,7 +1,7 @@
[tools]
node = "22.20.0"
flutter = "3.35.5"
pnpm = "10.18.0"
pnpm = "10.18.1"
[tools."github:CQLabs/homebrew-dcm"]
version = "1.30.0"

View File

@@ -304,7 +304,6 @@ interface NativeSyncApi {
fun getAssetsCountSince(albumId: String, timestamp: Long): Long
fun getAssetsForAlbum(albumId: String, updatedTimeCond: Long?): List<PlatformAsset>
fun hashAssets(assetIds: List<String>, allowNetworkAccess: Boolean, callback: (Result<List<HashResult>>) -> Unit)
fun uploadAsset(callback: (Result<Boolean>) -> Unit)
fun cancelHashing()
companion object {
@@ -468,24 +467,6 @@ interface NativeSyncApi {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.NativeSyncApi.uploadAsset$separatedMessageChannelSuffix", codec)
if (api != null) {
channel.setMessageHandler { _, reply ->
api.uploadAsset{ result: Result<Boolean> ->
val error = result.exceptionOrNull()
if (error != null) {
reply.reply(MessagesPigeonUtils.wrapError(error))
} else {
val data = result.getOrNull()
reply.reply(MessagesPigeonUtils.wrapResult(data))
}
}
}
} else {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.NativeSyncApi.cancelHashing$separatedMessageChannelSuffix", codec)
if (api != null) {

View File

@@ -131,10 +131,13 @@
/* End PBXFileSystemSynchronizedBuildFileExceptionSet section */
/* Begin PBXFileSystemSynchronizedRootGroup section */
B231F52D2E93A44A00BC45D1 /* Core */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = Core;
sourceTree = "<group>";
};
B2CF7F8C2DDE4EBB00744BF6 /* Sync */ = {
isa = PBXFileSystemSynchronizedRootGroup;
exceptions = (
);
path = Sync;
sourceTree = "<group>";
};
@@ -247,6 +250,7 @@
97C146F01CF9000F007C117D /* Runner */ = {
isa = PBXGroup;
children = (
B231F52D2E93A44A00BC45D1 /* Core */,
B25D37792E72CA15008B6CA7 /* Connectivity */,
B21E34A62E5AF9760031FDB9 /* Background */,
B2CF7F8C2DDE4EBB00744BF6 /* Sync */,
@@ -331,6 +335,7 @@
F0B57D482DF764BE00DC5BCC /* PBXTargetDependency */,
);
fileSystemSynchronizedGroups = (
B231F52D2E93A44A00BC45D1 /* Core */,
B2CF7F8C2DDE4EBB00744BF6 /* Sync */,
);
name = Runner;
@@ -521,10 +526,14 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Copy Pods Resources";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";
@@ -553,10 +562,14 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Embed Pods Frameworks";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";

View File

@@ -20,7 +20,7 @@ import UIKit
GeneratedPluginRegistrant.register(with: self)
let controller: FlutterViewController = window?.rootViewController as! FlutterViewController
AppDelegate.registerPlugins(binaryMessenger: controller.binaryMessenger)
AppDelegate.registerPlugins(with: controller.engine)
BackgroundServicePlugin.register(with: self.registrar(forPlugin: "BackgroundServicePlugin")!)
BackgroundServicePlugin.registerBackgroundProcessing()
@@ -51,9 +51,13 @@ import UIKit
return super.application(application, didFinishLaunchingWithOptions: launchOptions)
}
public static func registerPlugins(binaryMessenger: FlutterBinaryMessenger) {
NativeSyncApiSetup.setUp(binaryMessenger: binaryMessenger, api: NativeSyncApiImpl())
ThumbnailApiSetup.setUp(binaryMessenger: binaryMessenger, api: ThumbnailApiImpl())
BackgroundWorkerFgHostApiSetup.setUp(binaryMessenger: binaryMessenger, api: BackgroundWorkerApiImpl())
public static func registerPlugins(with engine: FlutterEngine) {
NativeSyncApiImpl.register(with: engine.registrar(forPlugin: NativeSyncApiImpl.name)!)
ThumbnailApiSetup.setUp(binaryMessenger: engine.binaryMessenger, api: ThumbnailApiImpl())
BackgroundWorkerFgHostApiSetup.setUp(binaryMessenger: engine.binaryMessenger, api: BackgroundWorkerApiImpl())
}
public static func cancelPlugins(with engine: FlutterEngine) {
(engine.valuePublished(byPlugin: NativeSyncApiImpl.name) as? NativeSyncApiImpl)?.detachFromEngine()
}
}

View File

@@ -95,7 +95,7 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
// Register plugins in the new engine
GeneratedPluginRegistrant.register(with: engine)
// Register custom plugins
AppDelegate.registerPlugins(binaryMessenger: engine.binaryMessenger)
AppDelegate.registerPlugins(with: engine)
flutterApi = BackgroundWorkerFlutterApi(binaryMessenger: engine.binaryMessenger)
BackgroundWorkerBgHostApiSetup.setUp(binaryMessenger: engine.binaryMessenger, api: self)
@@ -168,6 +168,7 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
}
isComplete = true
AppDelegate.cancelPlugins(with: engine)
engine.destroyContext()
flutterApi = nil
completionHandler(success)

View File

@@ -0,0 +1,17 @@
class ImmichPlugin: NSObject {
var detached: Bool
override init() {
detached = false
super.init()
}
func detachFromEngine() {
self.detached = true
}
func completeWhenActive<T>(for completion: @escaping (T) -> Void, with value: T) {
guard !self.detached else { return }
completion(value)
}
}

View File

@@ -363,7 +363,6 @@ protocol NativeSyncApi {
func getAssetsCountSince(albumId: String, timestamp: Int64) throws -> Int64
func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset]
func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void)
func uploadAsset(completion: @escaping (Result<Bool, Error>) -> Void)
func cancelHashing() throws
}
@@ -520,21 +519,6 @@ class NativeSyncApiSetup {
} else {
hashAssetsChannel.setMessageHandler(nil)
}
let uploadAssetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.uploadAsset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
uploadAssetChannel.setMessageHandler { _, reply in
api.uploadAsset { result in
switch result {
case .success(let res):
reply(wrapResult(res))
case .failure(let error):
reply(wrapError(error))
}
}
}
} else {
uploadAssetChannel.setMessageHandler(nil)
}
let cancelHashingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.cancelHashing\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
cancelHashingChannel.setMessageHandler { _, reply in

View File

@@ -17,13 +17,25 @@ struct AssetWrapper: Hashable, Equatable {
}
}
class NativeSyncApiImpl: NativeSyncApi {
class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
static let name = "NativeSyncApi"
static func register(with registrar: any FlutterPluginRegistrar) {
let instance = NativeSyncApiImpl()
NativeSyncApiSetup.setUp(binaryMessenger: registrar.messenger(), api: instance)
registrar.publish(instance)
}
func detachFromEngine(for registrar: any FlutterPluginRegistrar) {
super.detachFromEngine()
}
private let defaults: UserDefaults
private let changeTokenKey = "immich:changeToken"
private let albumTypes: [PHAssetCollectionType] = [.album, .smartAlbum]
private let recoveredAlbumSubType = 1000000219
private var hashTask: Task<Void, Error>?
private var hashTask: Task<Void?, Error>?
private static let hashCancelledCode = "HASH_CANCELLED"
private static let hashCancelled = Result<[HashResult], Error>.failure(PigeonError(code: hashCancelledCode, message: "Hashing cancelled", details: nil))
@@ -272,7 +284,7 @@ class NativeSyncApiImpl: NativeSyncApi {
}
if Task.isCancelled {
return completion(Self.hashCancelled)
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
}
await withTaskGroup(of: HashResult?.self) { taskGroup in
@@ -280,7 +292,7 @@ class NativeSyncApiImpl: NativeSyncApi {
results.reserveCapacity(assets.count)
for asset in assets {
if Task.isCancelled {
return completion(Self.hashCancelled)
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
}
taskGroup.addTask {
guard let self = self else { return nil }
@@ -290,7 +302,7 @@ class NativeSyncApiImpl: NativeSyncApi {
for await result in taskGroup {
guard let result = result else {
return completion(Self.hashCancelled)
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
}
results.append(result)
}
@@ -299,7 +311,7 @@ class NativeSyncApiImpl: NativeSyncApi {
results.append(HashResult(assetId: missing, error: "Asset not found in library", hash: nil))
}
completion(.success(results))
return self?.completeWhenActive(for: completion, with: .success(results))
}
}
}
@@ -363,38 +375,4 @@ class NativeSyncApiImpl: NativeSyncApi {
PHAssetResourceManager.default().cancelDataRequest(requestId)
})
}
func uploadAsset(completion: @escaping (Result<Bool, Error>) -> Void) {
let bufferSize = 200 * 1024 * 1024
var buffer = Data(count: bufferSize)
buffer.withUnsafeMutableBytes { bufferPointer in
arc4random_buf(bufferPointer.baseAddress!, bufferSize)
}
var hasher = Insecure.SHA1()
hasher.update(data: buffer)
let checksum = Data(hasher.finalize()).base64EncodedString()
let tempDirectory = FileManager.default.temporaryDirectory
let tempFileURL = tempDirectory.appendingPathComponent("buffer.tmp")
do {
try buffer.write(to: tempFileURL)
print("File saved to: \(tempFileURL.path)")
} catch {
print("Error writing file: \(error)")
return completion(Result.failure(error))
}
let config = URLSessionConfiguration.background(withIdentifier: "app.mertalev.immich.upload")
let session = URLSession(configuration: config)
var request = URLRequest(url: URL(string: "https://<hardcoded-host>/api/upload")!)
request.httpMethod = "POST"
request.setValue("<hardcoded-api-key>", forHTTPHeaderField: "X-Api-Key")
request.setValue("filename=\"test-image.jpg\", device-asset-id=\"rufh\", device-id=\"test\", file-created-at=\"2025-01-02T00:00:00.000Z\", file-modified-at=\"2025-01-01T00:00:00.000Z\", is-favorite, icloud-id=\"example-icloud-id\"", forHTTPHeaderField: "X-Immich-Asset-Data")
request.setValue("sha=:\(checksum):", forHTTPHeaderField: "Repr-Digest")
let task = session.uploadTask(with: request, fromFile: tempFileURL)
task.resume()
completion(Result.success(true))
}
}

View File

@@ -114,7 +114,7 @@ struct ImmichMemoryProvider: TimelineProvider {
}
}
// If we didnt add any memory images (some failure occured or no images in memory),
// If we didn't add any memory images (some failure occurred or no images in memory),
// default to 12 hours of random photos
if entries.count == 0 {
// this must be a do/catch since we need to

View File

@@ -56,6 +56,8 @@ sealed class BaseAsset {
// Overridden in subclasses
AssetState get storage;
String? get localId;
String? get remoteId;
String get heroTag;
@override

View File

@@ -2,12 +2,12 @@ part of 'base_asset.model.dart';
class LocalAsset extends BaseAsset {
final String id;
final String? remoteId;
final String? remoteAssetId;
final int orientation;
const LocalAsset({
required this.id,
this.remoteId,
String? remoteId,
required super.name,
super.checksum,
required super.type,
@@ -19,7 +19,13 @@ class LocalAsset extends BaseAsset {
super.isFavorite = false,
super.livePhotoVideoId,
this.orientation = 0,
});
}) : remoteAssetId = remoteId;
@override
String? get localId => id;
@override
String? get remoteId => remoteAssetId;
@override
AssetState get storage => remoteId == null ? AssetState.local : AssetState.merged;

View File

@@ -5,7 +5,7 @@ enum AssetVisibility { timeline, hidden, archive, locked }
// Model for an asset stored in the server
class RemoteAsset extends BaseAsset {
final String id;
final String? localId;
final String? localAssetId;
final String? thumbHash;
final AssetVisibility visibility;
final String ownerId;
@@ -13,7 +13,7 @@ class RemoteAsset extends BaseAsset {
const RemoteAsset({
required this.id,
this.localId,
String? localId,
required super.name,
required this.ownerId,
required super.checksum,
@@ -28,7 +28,13 @@ class RemoteAsset extends BaseAsset {
this.visibility = AssetVisibility.timeline,
super.livePhotoVideoId,
this.stackId,
});
}) : localAssetId = localId;
@override
String? get localId => localAssetId;
@override
String? get remoteId => id;
@override
AssetState get storage => localId == null ? AssetState.remote : AssetState.merged;

View File

@@ -192,6 +192,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
_cancellationToken.cancel();
_logger.info("Cleaning up background worker");
final cleanupFutures = [
nativeSyncApi?.cancelHashing(),
workerManager.dispose().catchError((_) async {
// Discard any errors on the dispose call
return;
@@ -201,7 +202,6 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
_drift.close(),
_driftLogger.close(),
backgroundSyncManager?.cancel(),
nativeSyncApi?.cancelHashing(),
];
if (_isar.isOpen) {

View File

@@ -120,6 +120,10 @@ class RemoteAlbumService {
return _repository.getSharedUsers(albumId);
}
Future<AlbumUserRole?> getUserRole(String albumId, String userId) {
return _repository.getUserRole(albumId, userId);
}
Future<List<RemoteAsset>> getAssets(String albumId) {
return _repository.getAssets(albumId);
}

View File

@@ -86,7 +86,7 @@ class StoreService {
_cache.remove(key.id);
}
/// Clears all values from thw store (cache and DB)
/// Clears all values from the store (cache and DB)
Future<void> clear() async {
await _storeRepository.deleteAll();
_cache.clear();

View File

@@ -221,6 +221,15 @@ class DriftRemoteAlbumRepository extends DriftDatabaseRepository {
.get();
}
Future<AlbumUserRole?> getUserRole(String albumId, String userId) async {
final query = _db.remoteAlbumUserEntity.select()
..where((row) => row.albumId.equals(albumId) & row.userId.equals(userId))
..limit(1);
final result = await query.getSingleOrNull();
return result?.role;
}
Future<List<RemoteAsset>> getAssets(String albumId) {
final query = _db.remoteAlbumAssetEntity.select().join([
innerJoin(_db.remoteAssetEntity, _db.remoteAssetEntity.id.equalsExp(_db.remoteAlbumAssetEntity.assetId)),

View File

@@ -15,7 +15,6 @@ import 'package:immich_mobile/presentation/widgets/backup/backup_toggle_button.w
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/backup/backup_album.provider.dart';
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/providers/sync_status.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/routing/router.dart';
@@ -142,84 +141,6 @@ class _DriftBackupPageState extends ConsumerState<DriftBackupPage> {
await stopBackup();
},
),
Container(
margin: const EdgeInsets.symmetric(horizontal: 4, vertical: 4),
decoration: BoxDecoration(
borderRadius: const BorderRadius.all(Radius.circular(20)),
gradient: LinearGradient(
colors: [
context.primaryColor.withValues(alpha: 0.5),
context.primaryColor.withValues(alpha: 0.4),
context.primaryColor.withValues(alpha: 0.5),
],
stops: const [0.0, 0.5, 1.0],
begin: Alignment.topLeft,
end: Alignment.bottomRight,
),
boxShadow: [
BoxShadow(
color: context.primaryColor.withValues(alpha: 0.1),
blurRadius: 12,
offset: const Offset(0, 2),
),
],
),
child: Container(
margin: const EdgeInsets.all(1.5),
decoration: BoxDecoration(
borderRadius: const BorderRadius.all(Radius.circular(18.5)),
color: context.colorScheme.surfaceContainerLow,
),
child: Material(
color: context.colorScheme.surfaceContainerLow,
borderRadius: const BorderRadius.all(Radius.circular(20.5)),
child: InkWell(
borderRadius: const BorderRadius.all(Radius.circular(20.5)),
onTap: () => ref.read(nativeSyncApiProvider).uploadAsset(),
child: Padding(
padding: const EdgeInsets.symmetric(horizontal: 20, vertical: 16),
child: Row(
children: [
Container(
padding: const EdgeInsets.all(8),
decoration: BoxDecoration(
shape: BoxShape.circle,
gradient: LinearGradient(
colors: [
context.primaryColor.withValues(alpha: 0.2),
context.primaryColor.withValues(alpha: 0.1),
],
),
),
child: Icon(Icons.upload, color: context.primaryColor, size: 24),
),
const SizedBox(width: 16),
Expanded(
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
Text(
"Upload Asset (for testing)",
style: context.textTheme.titleMedium?.copyWith(
fontWeight: FontWeight.w600,
color: context.primaryColor,
),
),
],
),
],
),
),
],
),
),
),
),
),
),
switch (error) {
BackupError.none => const SizedBox.shrink(),
BackupError.syncFailed => Padding(

View File

@@ -540,34 +540,6 @@ class NativeSyncApi {
}
}
Future<bool> uploadAsset() async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.NativeSyncApi.uploadAsset$pigeonVar_messageChannelSuffix';
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
pigeonVar_channelName,
pigeonChannelCodec,
binaryMessenger: pigeonVar_binaryMessenger,
);
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(null);
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
if (pigeonVar_replyList == null) {
throw _createConnectionError(pigeonVar_channelName);
} else if (pigeonVar_replyList.length > 1) {
throw PlatformException(
code: pigeonVar_replyList[0]! as String,
message: pigeonVar_replyList[1] as String?,
details: pigeonVar_replyList[2],
);
} else if (pigeonVar_replyList[0] == null) {
throw PlatformException(
code: 'null-error',
message: 'Host platform returned null value for non-null return value.',
);
} else {
return (pigeonVar_replyList[0] as bool?)!;
}
}
Future<void> cancelHashing() async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.NativeSyncApi.cancelHashing$pigeonVar_messageChannelSuffix';

View File

@@ -169,9 +169,11 @@ class _RemoteAlbumPageState extends ConsumerState<RemoteAlbumPage> {
context.pushRoute(const DriftActivitiesRoute());
}
void showOptionSheet(BuildContext context) {
Future<void> showOptionSheet(BuildContext context) async {
final user = ref.watch(currentUserProvider);
final isOwner = user != null ? user.id == _album.ownerId : false;
final canAddPhotos =
await ref.read(remoteAlbumServiceProvider).getUserRole(_album.id, user?.id ?? '') == AlbumUserRole.editor;
showModalBottomSheet(
context: context,
@@ -193,22 +195,30 @@ class _RemoteAlbumPageState extends ConsumerState<RemoteAlbumPage> {
context.pop();
}
: null,
onAddPhotos: () async {
await addAssets(context);
context.pop();
},
onToggleAlbumOrder: () async {
await toggleAlbumOrder();
context.pop();
},
onEditAlbum: () async {
context.pop();
await showEditTitleAndDescription(context);
},
onCreateSharedLink: () async {
context.pop();
context.pushRoute(SharedLinkEditRoute(albumId: _album.id));
},
onAddPhotos: isOwner || canAddPhotos
? () async {
await addAssets(context);
context.pop();
}
: null,
onToggleAlbumOrder: isOwner
? () async {
await toggleAlbumOrder();
context.pop();
}
: null,
onEditAlbum: isOwner
? () async {
context.pop();
await showEditTitleAndDescription(context);
}
: null,
onCreateSharedLink: isOwner
? () async {
context.pop();
context.pushRoute(SharedLinkEditRoute(albumId: _album.id));
}
: null,
onShowOptions: () {
context.pop();
context.pushRoute(const DriftAlbumOptionsRoute());
@@ -220,6 +230,9 @@ class _RemoteAlbumPageState extends ConsumerState<RemoteAlbumPage> {
@override
Widget build(BuildContext context) {
final user = ref.watch(currentUserProvider);
final isOwner = user != null ? user.id == _album.ownerId : false;
return PopScope(
onPopInvokedWithResult: (didPop, _) {
if (didPop) {
@@ -243,8 +256,8 @@ class _RemoteAlbumPageState extends ConsumerState<RemoteAlbumPage> {
appBar: RemoteAlbumSliverAppBar(
icon: Icons.photo_album_outlined,
onShowOptions: () => showOptionSheet(context),
onToggleAlbumOrder: () => toggleAlbumOrder(),
onEditTitle: () => showEditTitleAndDescription(context),
onToggleAlbumOrder: isOwner ? () => toggleAlbumOrder() : null,
onEditTitle: isOwner ? () => showEditTitleAndDescription(context) : null,
onActivity: () => showActivity(context),
),
bottomSheet: RemoteAlbumBottomSheet(album: _album),

View File

@@ -8,6 +8,7 @@ import 'package:immich_mobile/presentation/widgets/action_buttons/base_action_bu
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
import 'package:immich_mobile/providers/infrastructure/action.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/widgets/asset_grid/delete_dialog.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
/// This delete action has the following behavior:
@@ -22,7 +23,17 @@ class DeleteLocalActionButton extends ConsumerWidget {
return;
}
final result = await ref.read(actionProvider.notifier).deleteLocal(source);
bool? backedUpOnly = await showDialog<bool>(
context: context,
builder: (BuildContext context) => DeleteLocalOnlyDialog(onDeleteLocal: (_) {}),
);
if (backedUpOnly == null) {
// User cancelled the dialog
return;
}
final result = await ref.read(actionProvider.notifier).deleteLocal(source, backedUpOnly);
ref.read(multiSelectProvider.notifier).reset();
if (source == ActionSource.viewer) {

View File

@@ -221,10 +221,8 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
context.scaffoldMessenger.hideCurrentSnackBar();
// send image to casting if the server has it
if (asset.hasRemote) {
final remoteAsset = asset as RemoteAsset;
ref.read(castProvider.notifier).loadMedia(remoteAsset, false);
if (asset is RemoteAsset) {
ref.read(castProvider.notifier).loadMedia(asset, false);
} else {
// casting cannot show local assets
context.scaffoldMessenger.clearSnackBars();

View File

@@ -43,7 +43,7 @@ class ViewerBottomBar extends ConsumerWidget {
final actions = <Widget>[
const ShareActionButton(source: ActionSource.viewer),
if (asset.isLocalOnly) const UploadActionButton(source: ActionSource.viewer),
if (asset.type == AssetType.image) const EditImageActionButton(),
if (asset.type == AssetType.image && isOwner) const EditImageActionButton(),
if (isOwner) ...[
if (asset.hasRemote && isOwner && isArchived)
const UnArchiveActionButton(source: ActionSource.viewer)

View File

@@ -140,6 +140,7 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
final exifInfo = ref.watch(currentAssetExifProvider).valueOrNull;
final cameraTitle = _getCameraInfoTitle(exifInfo);
final isOwner = ref.watch(currentUserProvider)?.id == (asset is RemoteAsset ? asset.ownerId : null);
return SliverList.list(
children: [
@@ -147,10 +148,10 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
_SheetTile(
title: _getDateTime(context, asset),
titleStyle: context.textTheme.bodyMedium?.copyWith(fontWeight: FontWeight.w600),
trailing: asset.hasRemote ? const Icon(Icons.edit, size: 18) : null,
onTap: asset.hasRemote ? () async => await _editDateTime(context, ref) : null,
trailing: asset.hasRemote && isOwner ? const Icon(Icons.edit, size: 18) : null,
onTap: asset.hasRemote && isOwner ? () async => await _editDateTime(context, ref) : null,
),
if (exifInfo != null) _SheetAssetDescription(exif: exifInfo),
if (exifInfo != null) _SheetAssetDescription(exif: exifInfo, isEditable: isOwner),
const SheetPeopleDetails(),
const SheetLocationDetails(),
// Details header
@@ -265,8 +266,9 @@ class _SheetTile extends ConsumerWidget {
class _SheetAssetDescription extends ConsumerStatefulWidget {
final ExifInfo exif;
final bool isEditable;
const _SheetAssetDescription({required this.exif});
const _SheetAssetDescription({required this.exif, this.isEditable = true});
@override
ConsumerState<_SheetAssetDescription> createState() => _SheetAssetDescriptionState();
@@ -312,27 +314,33 @@ class _SheetAssetDescriptionState extends ConsumerState<_SheetAssetDescription>
// Update controller text when EXIF data changes
final currentDescription = currentExifInfo?.description ?? '';
final hintText = (widget.isEditable ? 'exif_bottom_sheet_description' : 'exif_bottom_sheet_no_description').t(
context: context,
);
if (_controller.text != currentDescription && !_descriptionFocus.hasFocus) {
_controller.text = currentDescription;
}
return Padding(
padding: const EdgeInsets.symmetric(horizontal: 16.0, vertical: 8),
child: TextField(
controller: _controller,
keyboardType: TextInputType.multiline,
focusNode: _descriptionFocus,
maxLines: null, // makes it grow as text is added
decoration: InputDecoration(
hintText: 'exif_bottom_sheet_description'.t(context: context),
border: InputBorder.none,
enabledBorder: InputBorder.none,
focusedBorder: InputBorder.none,
disabledBorder: InputBorder.none,
errorBorder: InputBorder.none,
focusedErrorBorder: InputBorder.none,
child: IgnorePointer(
ignoring: !widget.isEditable,
child: TextField(
controller: _controller,
keyboardType: TextInputType.multiline,
focusNode: _descriptionFocus,
maxLines: null, // makes it grow as text is added
decoration: InputDecoration(
hintText: hintText,
border: InputBorder.none,
enabledBorder: InputBorder.none,
focusedBorder: InputBorder.none,
disabledBorder: InputBorder.none,
errorBorder: InputBorder.none,
focusedErrorBorder: InputBorder.none,
),
onTapOutside: (_) => saveDescription(currentExifInfo?.description),
),
onTapOutside: (_) => saveDescription(currentExifInfo?.description),
),
);
}

View File

@@ -45,7 +45,8 @@ class ViewerTopAppBar extends ConsumerWidget implements PreferredSizeWidget {
(previousRouteName != TabShellRoute.name || tabRoute == TabEnum.search) &&
previousRouteName != AssetViewerRoute.name &&
previousRouteName != null &&
previousRouteName != LocalTimelineRoute.name;
previousRouteName != LocalTimelineRoute.name &&
isOwner;
final isShowingSheet = ref.watch(assetViewerProvider.select((state) => state.showingBottomSheet));
int opacity = ref.watch(assetViewerProvider.select((state) => state.backgroundOpacity));

View File

@@ -24,6 +24,7 @@ import 'package:immich_mobile/presentation/widgets/bottom_sheet/base_bottom_shee
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
class RemoteAlbumBottomSheet extends ConsumerStatefulWidget {
@@ -53,6 +54,7 @@ class _RemoteAlbumBottomSheetState extends ConsumerState<RemoteAlbumBottomSheet>
Widget build(BuildContext context) {
final multiselect = ref.watch(multiSelectProvider);
final isTrashEnable = ref.watch(serverInfoProvider.select((state) => state.serverFeatures.trash));
final ownsAlbum = ref.watch(currentUserProvider)?.id == widget.album.ownerId;
Future<void> addAssetsToAlbum(RemoteAlbum album) async {
final selectedAssets = multiselect.selectedAssets;
@@ -93,28 +95,35 @@ class _RemoteAlbumBottomSheetState extends ConsumerState<RemoteAlbumBottomSheet>
const ShareActionButton(source: ActionSource.timeline),
if (multiselect.hasRemote) ...[
const ShareLinkActionButton(source: ActionSource.timeline),
const ArchiveActionButton(source: ActionSource.timeline),
const FavoriteActionButton(source: ActionSource.timeline),
if (ownsAlbum) ...[
const ArchiveActionButton(source: ActionSource.timeline),
const FavoriteActionButton(source: ActionSource.timeline),
],
const DownloadActionButton(source: ActionSource.timeline),
isTrashEnable
? const TrashActionButton(source: ActionSource.timeline)
: const DeletePermanentActionButton(source: ActionSource.timeline),
const EditDateTimeActionButton(source: ActionSource.timeline),
const EditLocationActionButton(source: ActionSource.timeline),
const MoveToLockFolderActionButton(source: ActionSource.timeline),
if (multiselect.selectedAssets.length > 1) const StackActionButton(source: ActionSource.timeline),
if (multiselect.hasStacked) const UnStackActionButton(source: ActionSource.timeline),
if (ownsAlbum) ...[
isTrashEnable
? const TrashActionButton(source: ActionSource.timeline)
: const DeletePermanentActionButton(source: ActionSource.timeline),
const EditDateTimeActionButton(source: ActionSource.timeline),
const EditLocationActionButton(source: ActionSource.timeline),
const MoveToLockFolderActionButton(source: ActionSource.timeline),
if (multiselect.selectedAssets.length > 1) const StackActionButton(source: ActionSource.timeline),
if (multiselect.hasStacked) const UnStackActionButton(source: ActionSource.timeline),
],
],
if (multiselect.hasLocal) ...[
const DeleteLocalActionButton(source: ActionSource.timeline),
const UploadActionButton(source: ActionSource.timeline),
],
RemoveFromAlbumActionButton(source: ActionSource.timeline, albumId: widget.album.id),
],
slivers: [
const AddToAlbumHeader(),
AlbumSelector(onAlbumSelected: addAssetsToAlbum, onKeyboardExpanded: onKeyboardExpand),
if (ownsAlbum) RemoveFromAlbumActionButton(source: ActionSource.timeline, albumId: widget.album.id),
],
slivers: ownsAlbum
? [
const AddToAlbumHeader(),
AlbumSelector(onAlbumSelected: addAssetsToAlbum, onKeyboardExpanded: onKeyboardExpand),
]
: null,
);
}
}

View File

@@ -260,8 +260,15 @@ class ActionNotifier extends Notifier<void> {
}
}
Future<ActionResult> deleteLocal(ActionSource source) async {
final ids = _getLocalIdsForSource(source);
Future<ActionResult> deleteLocal(ActionSource source, bool backedUpOnly) async {
final List<String> ids;
if (backedUpOnly) {
final assets = _getAssets(source);
ids = assets.where((asset) => asset.storage == AssetState.merged).map((asset) => asset.localId!).toList();
} else {
ids = _getLocalIdsForSource(source);
}
try {
final deletedCount = await _service.deleteLocal(ids);
return ActionResult(count: deletedCount, success: true);

View File

@@ -22,6 +22,7 @@ final assetMediaRepositoryProvider = Provider((ref) => AssetMediaRepository(ref.
class AssetMediaRepository {
final AssetApiRepository _assetApiRepository;
static final Logger _log = Logger("AssetMediaRepository");
const AssetMediaRepository(this._assetApiRepository);

View File

@@ -22,12 +22,12 @@ class DeleteLocalOnlyDialog extends StatelessWidget {
@override
Widget build(BuildContext context) {
void onDeleteBackedUpOnly() {
context.pop();
context.pop(true);
onDeleteLocal(true);
}
void onForceDelete() {
context.pop();
context.pop(false);
onDeleteLocal(false);
}
@@ -36,26 +36,44 @@ class DeleteLocalOnlyDialog extends StatelessWidget {
title: const Text("delete_dialog_title").tr(),
content: const Text("delete_dialog_alert_local_non_backed_up").tr(),
actions: [
TextButton(
onPressed: () => context.pop(),
child: Text(
"cancel",
style: TextStyle(color: context.primaryColor, fontWeight: FontWeight.bold),
).tr(),
SizedBox(
width: double.infinity,
height: 48,
child: FilledButton(
onPressed: () => context.pop(),
style: FilledButton.styleFrom(
backgroundColor: context.colorScheme.surfaceDim,
foregroundColor: context.primaryColor,
),
child: const Text("cancel", style: TextStyle(fontWeight: FontWeight.bold)).tr(),
),
),
TextButton(
onPressed: onDeleteBackedUpOnly,
child: Text(
"delete_local_dialog_ok_backed_up_only",
style: TextStyle(color: context.colorScheme.tertiary, fontWeight: FontWeight.bold),
).tr(),
const SizedBox(height: 8),
SizedBox(
width: double.infinity,
height: 48,
child: FilledButton(
onPressed: onDeleteBackedUpOnly,
style: FilledButton.styleFrom(
backgroundColor: context.colorScheme.errorContainer,
foregroundColor: context.colorScheme.onErrorContainer,
),
child: const Text(
"delete_local_dialog_ok_backed_up_only",
style: TextStyle(fontWeight: FontWeight.bold),
).tr(),
),
),
TextButton(
onPressed: onForceDelete,
child: Text(
"delete_local_dialog_ok_force",
style: TextStyle(color: Colors.red[400], fontWeight: FontWeight.bold),
).tr(),
const SizedBox(height: 8),
SizedBox(
width: double.infinity,
height: 48,
child: FilledButton(
onPressed: onForceDelete,
style: FilledButton.styleFrom(backgroundColor: Colors.red[400], foregroundColor: Colors.white),
child: const Text("delete_local_dialog_ok_force", style: TextStyle(fontWeight: FontWeight.bold)).tr(),
),
),
],
);

View File

@@ -263,11 +263,6 @@ Class | Method | HTTP request | Description
*TrashApi* | [**emptyTrash**](doc//TrashApi.md#emptytrash) | **POST** /trash/empty |
*TrashApi* | [**restoreAssets**](doc//TrashApi.md#restoreassets) | **POST** /trash/restore/assets |
*TrashApi* | [**restoreTrash**](doc//TrashApi.md#restoretrash) | **POST** /trash/restore |
*UploadApi* | [**cancelUpload**](doc//UploadApi.md#cancelupload) | **DELETE** /upload/{id} |
*UploadApi* | [**getUploadOptions**](doc//UploadApi.md#getuploadoptions) | **OPTIONS** /upload |
*UploadApi* | [**getUploadStatus**](doc//UploadApi.md#getuploadstatus) | **HEAD** /upload/{id} |
*UploadApi* | [**resumeUpload**](doc//UploadApi.md#resumeupload) | **PATCH** /upload/{id} |
*UploadApi* | [**startUpload**](doc//UploadApi.md#startupload) | **POST** /upload |
*UsersApi* | [**createProfileImage**](doc//UsersApi.md#createprofileimage) | **POST** /users/profile-image |
*UsersApi* | [**deleteProfileImage**](doc//UsersApi.md#deleteprofileimage) | **DELETE** /users/profile-image |
*UsersApi* | [**deleteUserLicense**](doc//UsersApi.md#deleteuserlicense) | **DELETE** /users/me/license |
@@ -577,8 +572,6 @@ Class | Method | HTTP request | Description
- [UpdateAlbumUserDto](doc//UpdateAlbumUserDto.md)
- [UpdateAssetDto](doc//UpdateAssetDto.md)
- [UpdateLibraryDto](doc//UpdateLibraryDto.md)
- [UploadBackupConfig](doc//UploadBackupConfig.md)
- [UploadOkDto](doc//UploadOkDto.md)
- [UsageByUserDto](doc//UsageByUserDto.md)
- [UserAdminCreateDto](doc//UserAdminCreateDto.md)
- [UserAdminDeleteDto](doc//UserAdminDeleteDto.md)

View File

@@ -60,7 +60,6 @@ part 'api/system_metadata_api.dart';
part 'api/tags_api.dart';
part 'api/timeline_api.dart';
part 'api/trash_api.dart';
part 'api/upload_api.dart';
part 'api/users_api.dart';
part 'api/users_admin_api.dart';
part 'api/view_api.dart';
@@ -344,8 +343,6 @@ part 'model/update_album_dto.dart';
part 'model/update_album_user_dto.dart';
part 'model/update_asset_dto.dart';
part 'model/update_library_dto.dart';
part 'model/upload_backup_config.dart';
part 'model/upload_ok_dto.dart';
part 'model/usage_by_user_dto.dart';
part 'model/user_admin_create_dto.dart';
part 'model/user_admin_delete_dto.dart';

View File

@@ -1,379 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class UploadApi {
UploadApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
final ApiClient apiClient;
/// This endpoint requires the `asset.upload` permission.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> cancelUploadWithHttpInfo(String id, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/upload/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
if (slug != null) {
queryParams.addAll(_queryParams('', 'slug', slug));
}
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'DELETE',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// This endpoint requires the `asset.upload` permission.
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<void> cancelUpload(String id, { String? key, String? slug, }) async {
final response = await cancelUploadWithHttpInfo(id, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// Performs an HTTP 'OPTIONS /upload' operation and returns the [Response].
Future<Response> getUploadOptionsWithHttpInfo() async {
// ignore: prefer_const_declarations
final apiPath = r'/upload';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'OPTIONS',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
Future<void> getUploadOptions() async {
final response = await getUploadOptionsWithHttpInfo();
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// This endpoint requires the `asset.upload` permission.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> getUploadStatusWithHttpInfo(String id, String uploadDraftInteropVersion, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/upload/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
if (slug != null) {
queryParams.addAll(_queryParams('', 'slug', slug));
}
headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'HEAD',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// This endpoint requires the `asset.upload` permission.
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] key:
///
/// * [String] slug:
Future<void> getUploadStatus(String id, String uploadDraftInteropVersion, { String? key, String? slug, }) async {
final response = await getUploadStatusWithHttpInfo(id, uploadDraftInteropVersion, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// This endpoint requires the `asset.upload` permission.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] id (required):
///
/// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] uploadOffset (required):
/// Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> resumeUploadWithHttpInfo(String contentLength, String id, String uploadComplete, String uploadDraftInteropVersion, String uploadOffset, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/upload/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
if (slug != null) {
queryParams.addAll(_queryParams('', 'slug', slug));
}
headerParams[r'content-length'] = parameterToString(contentLength);
headerParams[r'upload-complete'] = parameterToString(uploadComplete);
headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
headerParams[r'upload-offset'] = parameterToString(uploadOffset);
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'PATCH',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// This endpoint requires the `asset.upload` permission.
///
/// Parameters:
///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] id (required):
///
/// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] uploadOffset (required):
/// Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
///
/// * [String] key:
///
/// * [String] slug:
Future<UploadOkDto?> resumeUpload(String contentLength, String id, String uploadComplete, String uploadDraftInteropVersion, String uploadOffset, { String? key, String? slug, }) async {
final response = await resumeUploadWithHttpInfo(contentLength, id, uploadComplete, uploadDraftInteropVersion, uploadOffset, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'UploadOkDto',) as UploadOkDto;
}
return null;
}
/// This endpoint requires the `asset.upload` permission.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] reprDigest (required):
/// RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.
///
/// * [String] xImmichAssetData (required):
/// RFC 9651 structured dictionary containing asset metadata with the following keys: - device-asset-id (string, required): Unique device asset identifier - device-id (string, required): Device identifier - file-created-at (string/date, required): ISO 8601 date string or Unix timestamp - file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp - filename (string, required): Original filename - is-favorite (boolean, optional): Favorite status - live-photo-video-id (string, optional): Live photo ID for assets from iOS devices - icloud-id (string, optional): iCloud identifier for assets from iOS devices
///
/// * [String] key:
///
/// * [String] slug:
///
/// * [String] uploadComplete:
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadDraftInteropVersion:
/// Indicates the version of the RUFH protocol supported by the client.
Future<Response> startUploadWithHttpInfo(String contentLength, String reprDigest, String xImmichAssetData, { String? key, String? slug, String? uploadComplete, String? uploadDraftInteropVersion, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/upload';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
if (slug != null) {
queryParams.addAll(_queryParams('', 'slug', slug));
}
headerParams[r'content-length'] = parameterToString(contentLength);
headerParams[r'repr-digest'] = parameterToString(reprDigest);
if (uploadComplete != null) {
headerParams[r'upload-complete'] = parameterToString(uploadComplete);
}
if (uploadDraftInteropVersion != null) {
headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
}
headerParams[r'x-immich-asset-data'] = parameterToString(xImmichAssetData);
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'POST',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// This endpoint requires the `asset.upload` permission.
///
/// Parameters:
///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] reprDigest (required):
/// RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.
///
/// * [String] xImmichAssetData (required):
/// RFC 9651 structured dictionary containing asset metadata with the following keys: - device-asset-id (string, required): Unique device asset identifier - device-id (string, required): Device identifier - file-created-at (string/date, required): ISO 8601 date string or Unix timestamp - file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp - filename (string, required): Original filename - is-favorite (boolean, optional): Favorite status - live-photo-video-id (string, optional): Live photo ID for assets from iOS devices - icloud-id (string, optional): iCloud identifier for assets from iOS devices
///
/// * [String] key:
///
/// * [String] slug:
///
/// * [String] uploadComplete:
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadDraftInteropVersion:
/// Indicates the version of the RUFH protocol supported by the client.
Future<UploadOkDto?> startUpload(String contentLength, String reprDigest, String xImmichAssetData, { String? key, String? slug, String? uploadComplete, String? uploadDraftInteropVersion, }) async {
final response = await startUploadWithHttpInfo(contentLength, reprDigest, xImmichAssetData, key: key, slug: slug, uploadComplete: uploadComplete, uploadDraftInteropVersion: uploadDraftInteropVersion, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'UploadOkDto',) as UploadOkDto;
}
return null;
}
}

View File

@@ -740,10 +740,6 @@ class ApiClient {
return UpdateAssetDto.fromJson(value);
case 'UpdateLibraryDto':
return UpdateLibraryDto.fromJson(value);
case 'UploadBackupConfig':
return UploadBackupConfig.fromJson(value);
case 'UploadOkDto':
return UploadOkDto.fromJson(value);
case 'UsageByUserDto':
return UsageByUserDto.fromJson(value);
case 'UserAdminCreateDto':

View File

@@ -14,31 +14,25 @@ class SystemConfigBackupsDto {
/// Returns a new [SystemConfigBackupsDto] instance.
SystemConfigBackupsDto({
required this.database,
required this.upload,
});
DatabaseBackupConfig database;
UploadBackupConfig upload;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigBackupsDto &&
other.database == database &&
other.upload == upload;
other.database == database;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(database.hashCode) +
(upload.hashCode);
(database.hashCode);
@override
String toString() => 'SystemConfigBackupsDto[database=$database, upload=$upload]';
String toString() => 'SystemConfigBackupsDto[database=$database]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'database'] = this.database;
json[r'upload'] = this.upload;
return json;
}
@@ -52,7 +46,6 @@ class SystemConfigBackupsDto {
return SystemConfigBackupsDto(
database: DatabaseBackupConfig.fromJson(json[r'database'])!,
upload: UploadBackupConfig.fromJson(json[r'upload'])!,
);
}
return null;
@@ -101,7 +94,6 @@ class SystemConfigBackupsDto {
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'database',
'upload',
};
}

View File

@@ -17,7 +17,6 @@ class SystemConfigNightlyTasksDto {
required this.databaseCleanup,
required this.generateMemories,
required this.missingThumbnails,
required this.removeStaleUploads,
required this.startTime,
required this.syncQuotaUsage,
});
@@ -30,8 +29,6 @@ class SystemConfigNightlyTasksDto {
bool missingThumbnails;
bool removeStaleUploads;
String startTime;
bool syncQuotaUsage;
@@ -42,7 +39,6 @@ class SystemConfigNightlyTasksDto {
other.databaseCleanup == databaseCleanup &&
other.generateMemories == generateMemories &&
other.missingThumbnails == missingThumbnails &&
other.removeStaleUploads == removeStaleUploads &&
other.startTime == startTime &&
other.syncQuotaUsage == syncQuotaUsage;
@@ -53,12 +49,11 @@ class SystemConfigNightlyTasksDto {
(databaseCleanup.hashCode) +
(generateMemories.hashCode) +
(missingThumbnails.hashCode) +
(removeStaleUploads.hashCode) +
(startTime.hashCode) +
(syncQuotaUsage.hashCode);
@override
String toString() => 'SystemConfigNightlyTasksDto[clusterNewFaces=$clusterNewFaces, databaseCleanup=$databaseCleanup, generateMemories=$generateMemories, missingThumbnails=$missingThumbnails, removeStaleUploads=$removeStaleUploads, startTime=$startTime, syncQuotaUsage=$syncQuotaUsage]';
String toString() => 'SystemConfigNightlyTasksDto[clusterNewFaces=$clusterNewFaces, databaseCleanup=$databaseCleanup, generateMemories=$generateMemories, missingThumbnails=$missingThumbnails, startTime=$startTime, syncQuotaUsage=$syncQuotaUsage]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -66,7 +61,6 @@ class SystemConfigNightlyTasksDto {
json[r'databaseCleanup'] = this.databaseCleanup;
json[r'generateMemories'] = this.generateMemories;
json[r'missingThumbnails'] = this.missingThumbnails;
json[r'removeStaleUploads'] = this.removeStaleUploads;
json[r'startTime'] = this.startTime;
json[r'syncQuotaUsage'] = this.syncQuotaUsage;
return json;
@@ -85,7 +79,6 @@ class SystemConfigNightlyTasksDto {
databaseCleanup: mapValueOfType<bool>(json, r'databaseCleanup')!,
generateMemories: mapValueOfType<bool>(json, r'generateMemories')!,
missingThumbnails: mapValueOfType<bool>(json, r'missingThumbnails')!,
removeStaleUploads: mapValueOfType<bool>(json, r'removeStaleUploads')!,
startTime: mapValueOfType<String>(json, r'startTime')!,
syncQuotaUsage: mapValueOfType<bool>(json, r'syncQuotaUsage')!,
);
@@ -139,7 +132,6 @@ class SystemConfigNightlyTasksDto {
'databaseCleanup',
'generateMemories',
'missingThumbnails',
'removeStaleUploads',
'startTime',
'syncQuotaUsage',
};

View File

@@ -1,100 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
/// Backup configuration for uploads; `maxAgeHours` presumably bounds how long
/// a stale/partial upload is kept before cleanup — confirm against the server
/// config.
///
/// NOTE(review): this class is OpenAPI-generated ("AUTO-GENERATED FILE, DO
/// NOT MODIFY" header above); regenerate from the spec rather than editing
/// by hand.
class UploadBackupConfig {
  /// Returns a new [UploadBackupConfig] instance.
  UploadBackupConfig({
    required this.maxAgeHours,
  });

  /// Minimum value: 1
  num maxAgeHours;

  @override
  bool operator ==(Object other) => identical(this, other) || other is UploadBackupConfig &&
    other.maxAgeHours == maxAgeHours;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (maxAgeHours.hashCode);

  @override
  String toString() => 'UploadBackupConfig[maxAgeHours=$maxAgeHours]';

  /// Serializes this instance to a JSON-compatible map.
  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'maxAgeHours'] = this.maxAgeHours;
    return json;
  }

  /// Returns a new [UploadBackupConfig] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static UploadBackupConfig? fromJson(dynamic value) {
    upgradeDto(value, "UploadBackupConfig");
    if (value is Map) {
      final json = value.cast<String, dynamic>();
      return UploadBackupConfig(
        // NOTE(review): parses through string interpolation, so a missing or
        // non-numeric 'maxAgeHours' key throws a FormatException instead of
        // yielding null.
        maxAgeHours: num.parse('${json[r'maxAgeHours']}'),
      );
    }
    return null;
  }

  // Parses a JSON list into UploadBackupConfig objects, silently skipping
  // entries that fail to parse (fromJson returned null).
  static List<UploadBackupConfig> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <UploadBackupConfig>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = UploadBackupConfig.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  // Parses a JSON object into a map of String -> UploadBackupConfig,
  // skipping values that fail to parse.
  static Map<String, UploadBackupConfig> mapFromJson(dynamic json) {
    final map = <String, UploadBackupConfig>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = UploadBackupConfig.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of UploadBackupConfig-objects as value to a dart map
  static Map<String, List<UploadBackupConfig>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<UploadBackupConfig>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = UploadBackupConfig.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'maxAgeHours',
  };
}

View File

@@ -1,99 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
/// Response DTO for the upload endpoints; carries the upload's id.
///
/// NOTE(review): this class is OpenAPI-generated ("AUTO-GENERATED FILE, DO
/// NOT MODIFY" header above); regenerate from the spec rather than editing
/// by hand.
class UploadOkDto {
  /// Returns a new [UploadOkDto] instance.
  UploadOkDto({
    required this.id,
  });

  /// Identifier returned by the server for this upload.
  String id;

  @override
  bool operator ==(Object other) => identical(this, other) || other is UploadOkDto &&
    other.id == id;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (id.hashCode);

  @override
  String toString() => 'UploadOkDto[id=$id]';

  /// Serializes this instance to a JSON-compatible map.
  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'id'] = this.id;
    return json;
  }

  /// Returns a new [UploadOkDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static UploadOkDto? fromJson(dynamic value) {
    upgradeDto(value, "UploadOkDto");
    if (value is Map) {
      final json = value.cast<String, dynamic>();
      return UploadOkDto(
        // Throws (via `!`) if 'id' is missing or not a String.
        id: mapValueOfType<String>(json, r'id')!,
      );
    }
    return null;
  }

  // Parses a JSON list into UploadOkDto objects, silently skipping entries
  // that fail to parse (fromJson returned null).
  static List<UploadOkDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <UploadOkDto>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = UploadOkDto.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  // Parses a JSON object into a map of String -> UploadOkDto, skipping
  // values that fail to parse.
  static Map<String, UploadOkDto> mapFromJson(dynamic json) {
    final map = <String, UploadOkDto>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = UploadOkDto.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of UploadOkDto-objects as value to a dart map
  static Map<String, List<UploadOkDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<UploadOkDto>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = UploadOkDto.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'id',
  };
}

View File

@@ -106,8 +106,5 @@ abstract class NativeSyncApi {
@TaskQueue(type: TaskQueueType.serialBackgroundThread)
List<HashResult> hashAssets(List<String> assetIds, {bool allowNetworkAccess = false});
@async
bool uploadAsset();
void cancelHashing();
}

View File

@@ -9225,324 +9225,6 @@
"description": "This endpoint requires the `asset.delete` permission."
}
},
"/upload": {
"options": {
"operationId": "getUploadOptions",
"parameters": [],
"responses": {
"204": {
"description": ""
}
},
"tags": [
"Upload"
]
},
"post": {
"operationId": "startUpload",
"parameters": [
{
"name": "content-length",
"in": "header",
"description": "Non-negative size of the request body in bytes.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "repr-digest",
"in": "header",
"description": "RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "upload-complete",
"in": "header",
"description": "Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.",
"required": false,
"schema": {
"type": "string"
}
},
{
"name": "upload-draft-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": false,
"schema": {
"type": "string"
}
},
{
"name": "x-immich-asset-data",
"in": "header",
"description": "RFC 9651 structured dictionary containing asset metadata with the following keys:\n- device-asset-id (string, required): Unique device asset identifier\n- device-id (string, required): Device identifier\n- file-created-at (string/date, required): ISO 8601 date string or Unix timestamp\n- file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp\n- filename (string, required): Original filename\n- is-favorite (boolean, optional): Favorite status\n- live-photo-video-id (string, optional): Live photo ID for assets from iOS devices\n- icloud-id (string, optional): iCloud identifier for assets from iOS devices",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UploadOkDto"
}
}
},
"description": ""
},
"201": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Upload"
],
"x-immich-permission": "asset.upload",
"description": "This endpoint requires the `asset.upload` permission."
}
},
"/upload/{id}": {
"delete": {
"operationId": "cancelUpload",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Upload"
],
"x-immich-permission": "asset.upload",
"description": "This endpoint requires the `asset.upload` permission."
},
"head": {
"operationId": "getUploadStatus",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "upload-draft-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Upload"
],
"x-immich-permission": "asset.upload",
"description": "This endpoint requires the `asset.upload` permission."
},
"patch": {
"operationId": "resumeUpload",
"parameters": [
{
"name": "content-length",
"in": "header",
"description": "Non-negative size of the request body in bytes.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "upload-complete",
"in": "header",
"description": "Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "upload-draft-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "upload-offset",
"in": "header",
"description": "Non-negative byte offset indicating the starting position of the data in the request body within the entire file.",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UploadOkDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Upload"
],
"x-immich-permission": "asset.upload",
"description": "This endpoint requires the `asset.upload` permission."
}
},
"/users": {
"get": {
"operationId": "searchUsers",
@@ -16296,14 +15978,10 @@
"properties": {
"database": {
"$ref": "#/components/schemas/DatabaseBackupConfig"
},
"upload": {
"$ref": "#/components/schemas/UploadBackupConfig"
}
},
"required": [
"database",
"upload"
"database"
],
"type": "object"
},
@@ -16828,9 +16506,6 @@
"missingThumbnails": {
"type": "boolean"
},
"removeStaleUploads": {
"type": "boolean"
},
"startTime": {
"type": "string"
},
@@ -16843,7 +16518,6 @@
"databaseCleanup",
"generateMemories",
"missingThumbnails",
"removeStaleUploads",
"startTime",
"syncQuotaUsage"
],
@@ -17692,29 +17366,6 @@
},
"type": "object"
},
"UploadBackupConfig": {
"properties": {
"maxAgeHours": {
"minimum": 1,
"type": "number"
}
},
"required": [
"maxAgeHours"
],
"type": "object"
},
"UploadOkDto": {
"properties": {
"id": {
"type": "string"
}
},
"required": [
"id"
],
"type": "object"
},
"UsageByUserDto": {
"properties": {
"photos": {

View File

@@ -1309,12 +1309,8 @@ export type DatabaseBackupConfig = {
enabled: boolean;
keepLastAmount: number;
};
export type UploadBackupConfig = {
maxAgeHours: number;
};
export type SystemConfigBackupsDto = {
database: DatabaseBackupConfig;
upload: UploadBackupConfig;
};
export type SystemConfigFFmpegDto = {
accel: TranscodeHWAccel;
@@ -1434,7 +1430,6 @@ export type SystemConfigNightlyTasksDto = {
databaseCleanup: boolean;
generateMemories: boolean;
missingThumbnails: boolean;
removeStaleUploads: boolean;
startTime: string;
syncQuotaUsage: boolean;
};
@@ -1600,9 +1595,6 @@ export type TimeBucketsResponseDto = {
export type TrashResponseDto = {
count: number;
};
export type UploadOkDto = {
id: string;
};
export type UserUpdateMeDto = {
avatarColor?: (UserAvatarColor) | null;
email?: string;
@@ -4425,109 +4417,6 @@ export function restoreAssets({ bulkIdsDto }: {
body: bulkIdsDto
})));
}
/**
 * Probes the upload endpoint via HTTP OPTIONS (`OPTIONS /upload`);
 * the server responds with no body (204 per the spec).
 */
export function getUploadOptions(opts?: Oazapfts.RequestOpts) {
  return oazapfts.ok(oazapfts.fetchText("/upload", {
    ...opts,
    method: "OPTIONS"
  }));
}
/**
 * Starts a new upload (`POST /upload`). The request body metadata is carried
 * entirely in headers; responds 200 with an `UploadOkDto` or a bare 201.
 *
 * This endpoint requires the `asset.upload` permission.
 */
export function startUpload({ contentLength, key, reprDigest, slug, uploadComplete, uploadDraftInteropVersion, xImmichAssetData }: {
  contentLength: string;
  key?: string;
  reprDigest: string;
  slug?: string;
  uploadComplete?: string;
  uploadDraftInteropVersion?: string;
  xImmichAssetData: string;
}, opts?: Oazapfts.RequestOpts) {
  return oazapfts.ok(oazapfts.fetchJson<{
    status: 200;
    data: UploadOkDto;
  } | {
    status: 201;
  }>(`/upload${QS.query(QS.explode({
    key,
    slug
  }))}`, {
    ...opts,
    method: "POST",
    headers: oazapfts.mergeHeaders(opts?.headers, {
      "content-length": contentLength,
      "repr-digest": reprDigest,
      "upload-complete": uploadComplete,
      "upload-draft-interop-version": uploadDraftInteropVersion,
      "x-immich-asset-data": xImmichAssetData
    })
  }));
}
/**
 * Cancels an in-progress upload (`DELETE /upload/{id}`).
 *
 * This endpoint requires the `asset.upload` permission.
 */
export function cancelUpload({ id, key, slug }: {
  id: string;
  key?: string;
  slug?: string;
}, opts?: Oazapfts.RequestOpts) {
  return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
    key,
    slug
  }))}`, {
    ...opts,
    method: "DELETE"
  }));
}
/**
 * Queries the status of an upload (`HEAD /upload/{id}`); as a HEAD request
 * the response carries no body.
 *
 * This endpoint requires the `asset.upload` permission.
 */
export function getUploadStatus({ id, key, slug, uploadDraftInteropVersion }: {
  id: string;
  key?: string;
  slug?: string;
  uploadDraftInteropVersion: string;
}, opts?: Oazapfts.RequestOpts) {
  return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
    key,
    slug
  }))}`, {
    ...opts,
    method: "HEAD",
    headers: oazapfts.mergeHeaders(opts?.headers, {
      "upload-draft-interop-version": uploadDraftInteropVersion
    })
  }));
}
/**
 * Appends data to an existing upload starting at `uploadOffset`
 * (`PATCH /upload/{id}`); responds 200 with an `UploadOkDto`.
 *
 * This endpoint requires the `asset.upload` permission.
 */
export function resumeUpload({ contentLength, id, key, slug, uploadComplete, uploadDraftInteropVersion, uploadOffset }: {
  contentLength: string;
  id: string;
  key?: string;
  slug?: string;
  uploadComplete: string;
  uploadDraftInteropVersion: string;
  uploadOffset: string;
}, opts?: Oazapfts.RequestOpts) {
  return oazapfts.ok(oazapfts.fetchJson<{
    status: 200;
    data: UploadOkDto;
  }>(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
    key,
    slug
  }))}`, {
    ...opts,
    method: "PATCH",
    headers: oazapfts.mergeHeaders(opts?.headers, {
      "content-length": contentLength,
      "upload-complete": uploadComplete,
      "upload-draft-interop-version": uploadDraftInteropVersion,
      "upload-offset": uploadOffset
    })
  }));
}
/**
* This endpoint requires the `user.read` permission.
*/

View File

@@ -3,7 +3,7 @@
"version": "0.0.1",
"description": "Monorepo for Immich",
"private": true,
"packageManager": "pnpm@10.18.0+sha512.e804f889f1cecc40d572db084eec3e4881739f8dec69c0ff10d2d1beff9a4e309383ba27b5b750059d7f4c149535b6cd0d2cb1ed3aeb739239a4284a68f40cfa",
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34",
"engines": {
"pnpm": ">=10.0.0"
}

925
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -44,14 +44,14 @@
"@nestjs/websockets": "^11.0.4",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/context-async-hooks": "^2.0.0",
"@opentelemetry/exporter-prometheus": "^0.205.0",
"@opentelemetry/instrumentation-http": "^0.205.0",
"@opentelemetry/instrumentation-ioredis": "^0.53.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.52.0",
"@opentelemetry/instrumentation-pg": "^0.58.0",
"@opentelemetry/exporter-prometheus": "^0.206.0",
"@opentelemetry/instrumentation-http": "^0.206.0",
"@opentelemetry/instrumentation-ioredis": "^0.54.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.53.0",
"@opentelemetry/instrumentation-pg": "^0.59.0",
"@opentelemetry/resources": "^2.0.1",
"@opentelemetry/sdk-metrics": "^2.0.1",
"@opentelemetry/sdk-node": "^0.205.0",
"@opentelemetry/sdk-node": "^0.206.0",
"@opentelemetry/semantic-conventions": "^1.34.0",
"@react-email/components": "^0.5.0",
"@react-email/render": "^1.1.2",
@@ -78,7 +78,7 @@
"ioredis": "^5.3.2",
"js-yaml": "^4.1.0",
"kysely": "0.28.2",
"kysely-postgres-js": "^2.0.0",
"kysely-postgres-js": "^3.0.0",
"lodash": "^4.17.21",
"luxon": "^3.4.2",
"mnemonist": "^0.40.3",
@@ -104,7 +104,6 @@
"sharp": "^0.34.4",
"sirv": "^3.0.0",
"socket.io": "^4.8.1",
"structured-headers": "^2.0.2",
"tailwindcss-preset-email": "^1.4.0",
"thumbhash": "^0.1.1",
"ua-parser-js": "^2.0.0",

View File

@@ -22,9 +22,6 @@ export interface SystemConfig {
cronExpression: string;
keepLastAmount: number;
};
upload: {
maxAgeHours: number;
};
};
ffmpeg: {
crf: number;
@@ -136,7 +133,6 @@ export interface SystemConfig {
clusterNewFaces: boolean;
generateMemories: boolean;
syncQuotaUsage: boolean;
removeStaleUploads: boolean;
};
trash: {
enabled: boolean;
@@ -194,9 +190,6 @@ export const defaults = Object.freeze<SystemConfig>({
cronExpression: CronExpression.EVERY_DAY_AT_2AM,
keepLastAmount: 14,
},
upload: {
maxAgeHours: 72,
},
},
ffmpeg: {
crf: 23,
@@ -332,7 +325,6 @@ export const defaults = Object.freeze<SystemConfig>({
syncQuotaUsage: true,
missingThumbnails: true,
clusterNewFaces: true,
removeStaleUploads: true,
},
trash: {
enabled: true,

View File

@@ -1,445 +0,0 @@
import { createHash, randomUUID } from 'node:crypto';
import { AssetUploadController } from 'src/controllers/asset-upload.controller';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { serializeDictionary } from 'structured-headers';
import request from 'supertest';
import { factory } from 'test/small.factory';
import { ControllerContext, controllerSetup, mockBaseService } from 'test/utils';
/**
 * Builds a valid RFC 9651 structured-dictionary value for the
 * X-Immich-Asset-Data header, merging optional per-test overrides over the
 * required default keys (filename, device-asset-id, device-id,
 * file-created-at, file-modified-at).
 */
const makeAssetData = (overrides?: Record<string, string | boolean>): string => {
  // Precise override type replaces the previous `Partial<any>`, which is
  // equivalent to `any` and defeated type checking on test inputs.
  return serializeDictionary({
    filename: 'test-image.jpg',
    'device-asset-id': 'test-asset-id',
    'device-id': 'test-device',
    'file-created-at': new Date('2025-01-02T00:00:00Z').toISOString(),
    'file-modified-at': new Date('2025-01-01T00:00:00Z').toISOString(),
    'is-favorite': false,
    ...overrides,
  });
};
// Controller-level tests for the resumable-upload (RUFH draft) endpoints.
// AssetUploadService is fully mocked, so these tests exercise only the
// authentication wiring and header/parameter validation, not upload logic.
describe(AssetUploadController.name, () => {
  let ctx: ControllerContext;
  // Fresh random payload per test, plus its matching sha1 Repr-Digest value.
  let buffer: Buffer;
  let checksum: string;
  const service = mockBaseService(AssetUploadService);
  beforeAll(async () => {
    ctx = await controllerSetup(AssetUploadController, [{ provide: AssetUploadService, useValue: service }]);
    return () => ctx.close();
  });
  beforeEach(() => {
    service.resetAllMocks();
    // The controller hands the raw response object to the service, so each
    // mocked handler must end the response itself or requests would hang.
    service.startUpload.mockImplementation((_, __, res, ___) => {
      res.send();
      return Promise.resolve();
    });
    service.resumeUpload.mockImplementation((_, __, res, ___, ____) => {
      res.send();
      return Promise.resolve();
    });
    service.cancelUpload.mockImplementation((_, __, res) => {
      res.send();
      return Promise.resolve();
    });
    service.getUploadStatus.mockImplementation((_, res, __, ___) => {
      res.send();
      return Promise.resolve();
    });
    ctx.reset();
    buffer = Buffer.from(randomUUID());
    checksum = `sha=:${createHash('sha1').update(buffer).digest('base64')}:`;
  });
  describe('POST /upload', () => {
    it('should be an authenticated route', async () => {
      await request(ctx.getHttpServer()).post('/upload');
      expect(ctx.authenticate).toHaveBeenCalled();
    });
    it('should require at least version 3 of Upload-Draft-Interop-Version header if provided', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Upload-Draft-Interop-Version', '2')
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining(['version must not be less than 3']),
        }),
      );
    });
    it('should require X-Immich-Asset-Data header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'x-immich-asset-data header is required' }));
    });
    it('should require Repr-Digest header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'Missing repr-digest header' }));
    });
    // "Conventional" upload: no RUFH headers at all, just a plain POST.
    it('should allow conventional upload without Upload-Complete header', async () => {
      const { status } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(status).toBe(201);
    });
    it('should require Upload-Length header for incomplete upload', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?0')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'Missing upload-length header' }));
    });
    it('should infer upload length from content length if complete upload', async () => {
      const { status } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .send(buffer);
      expect(status).toBe(201);
    });
    // NOTE(review): X-Immich-Asset-Data is set to `checksum` here rather than
    // makeAssetData(); the assertion still targets the repr-digest error, but
    // confirm the asset-data value is intentional and not a copy/paste slip.
    it('should reject invalid Repr-Digest format', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', checksum)
        .set('Repr-Digest', 'invalid-format')
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'Invalid repr-digest header' }));
    });
    it('should validate device-asset-id is required in asset data', async () => {
      const assetData = serializeDictionary({
        filename: 'test.jpg',
        'device-id': 'test-device',
        'file-created-at': new Date().toISOString(),
        'file-modified-at': new Date().toISOString(),
      });
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', assetData)
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([expect.stringContaining('deviceAssetId')]),
        }),
      );
    });
    it('should validate device-id is required in asset data', async () => {
      const assetData = serializeDictionary({
        filename: 'test.jpg',
        'device-asset-id': 'test-asset',
        'file-created-at': new Date().toISOString(),
        'file-modified-at': new Date().toISOString(),
      });
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', assetData)
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([expect.stringContaining('deviceId')]),
        }),
      );
    });
    it('should validate filename is required in asset data', async () => {
      const assetData = serializeDictionary({
        'device-asset-id': 'test-asset',
        'device-id': 'test-device',
        'file-created-at': new Date().toISOString(),
        'file-modified-at': new Date().toISOString(),
      });
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', assetData)
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([expect.stringContaining('filename')]),
        }),
      );
    });
    // Version 3 of the draft uses Upload-Incomplete instead of
    // Upload-Complete; both headers are sent here, only validity is asserted.
    it('should accept Upload-Incomplete header for version 3', async () => {
      const { body, status } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '3')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Incomplete', '?0')
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(body).toEqual({});
      expect(status).not.toBe(400);
    });
    // Structured-field booleans are '?1'/'?0'; a bare 'true' must be rejected.
    it('should validate Upload-Complete is a boolean structured field', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', 'true')
        .set('Upload-Length', '1024')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'upload-complete must be a structured boolean value' }));
    });
    it('should validate Upload-Length is a positive integer', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '-100')
        .send(buffer);
      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining(['uploadLength must not be less than 1']),
        }),
      );
    });
  });
  describe('PATCH /upload/:id', () => {
    const uploadId = factory.uuid();
    it('should be an authenticated route', async () => {
      await request(ctx.getHttpServer()).patch(`/upload/${uploadId}`);
      expect(ctx.authenticate).toHaveBeenCalled();
    });
    it('should require Upload-Draft-Interop-Version header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Offset', '0')
        .set('Upload-Complete', '?1')
        .send(Buffer.from('test'));
      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining(['version must be an integer number', 'version must not be less than 3']),
        }),
      );
    });
    it('should require Upload-Offset header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '8')
        .set('Upload-Complete', '?1')
        .send(Buffer.from('test'));
      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([
            'uploadOffset must be an integer number',
            'uploadOffset must not be less than 0',
          ]),
        }),
      );
    });
    it('should require Upload-Complete header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '8')
        .set('Upload-Offset', '0')
        .set('Content-Type', 'application/partial-upload')
        .send(Buffer.from('test'));
      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: ['uploadComplete must be a boolean value'] }));
    });
    it('should validate UUID parameter', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch('/upload/invalid-uuid')
        .set('Upload-Draft-Interop-Version', '8')
        .set('Upload-Offset', '0')
        .set('Upload-Complete', '?0')
        .send(Buffer.from('test'));
      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: ['id must be a UUID'] }));
    });
    it('should validate Upload-Offset is a non-negative integer', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '8')
        .set('Upload-Offset', '-50')
        .set('Upload-Complete', '?0')
        .send(Buffer.from('test'));
      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining(['uploadOffset must not be less than 0']),
        }),
      );
    });
    it('should require Content-Type: application/partial-upload for version >= 6', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '6')
        .set('Upload-Offset', '0')
        .set('Upload-Complete', '?0')
        .set('Content-Type', 'application/octet-stream')
        .send(Buffer.from('test'));
      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: ['contentType must be equal to application/partial-upload'],
        }),
      );
    });
    it('should allow other Content-Type for version < 6', async () => {
      const { body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '3')
        .set('Upload-Offset', '0')
        .set('Upload-Incomplete', '?1')
        .set('Content-Type', 'application/octet-stream')
        .send();
      // Will fail for other reasons, but not content-type validation
      expect(body).not.toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([expect.stringContaining('contentType')]),
        }),
      );
    });
    it('should accept Upload-Incomplete header for version 3', async () => {
      const { status } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '3')
        .set('Upload-Offset', '0')
        .set('Upload-Incomplete', '?1')
        .send();
      // Should not fail validation
      expect(status).not.toBe(400);
    });
  });
  describe('DELETE /upload/:id', () => {
    const uploadId = factory.uuid();
    it('should be an authenticated route', async () => {
      await request(ctx.getHttpServer()).delete(`/upload/${uploadId}`);
      expect(ctx.authenticate).toHaveBeenCalled();
    });
    it('should validate UUID parameter', async () => {
      const { status, body } = await request(ctx.getHttpServer()).delete('/upload/invalid-uuid');
      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: ['id must be a UUID'] }));
    });
  });
  describe('HEAD /upload/:id', () => {
    const uploadId = factory.uuid();
    it('should be an authenticated route', async () => {
      await request(ctx.getHttpServer()).head(`/upload/${uploadId}`);
      expect(ctx.authenticate).toHaveBeenCalled();
    });
    it('should require Upload-Draft-Interop-Version header', async () => {
      const { status } = await request(ctx.getHttpServer()).head(`/upload/${uploadId}`);
      expect(status).toBe(400);
    });
    it('should validate UUID parameter', async () => {
      const { status } = await request(ctx.getHttpServer())
        .head('/upload/invalid-uuid')
        .set('Upload-Draft-Interop-Version', '8');
      expect(status).toBe(400);
    });
  });
});

View File

@@ -1,108 +0,0 @@
import { Controller, Delete, Head, HttpCode, HttpStatus, Options, Param, Patch, Post, Req, Res } from '@nestjs/common';
import { ApiHeader, ApiOkResponse, ApiTags } from '@nestjs/swagger';
import { Request, Response } from 'express';
import { GetUploadStatusDto, Header, ResumeUploadDto, StartUploadDto, UploadOkDto } from 'src/dtos/asset-upload.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ImmichHeader, Permission } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { validateSyncOrReject } from 'src/utils/request';
import { UUIDParamDto } from 'src/validation';
// Reusable @ApiHeader metadata shared by the upload endpoints in this
// controller; individual routes override `required` via spread where needed.
const apiInteropVersion = {
  name: Header.InteropVersion,
  description: `Indicates the version of the RUFH protocol supported by the client.`,
  required: true,
};
const apiUploadComplete = {
  name: Header.UploadComplete,
  description:
    'Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.',
  required: true,
};
const apiContentLength = {
  name: Header.ContentLength,
  description: 'Non-negative size of the request body in bytes.',
  required: true,
};
// Per-response socket timeout applied in each handler before delegating.
// This is important to let go of the asset lock for an inactive request
const SOCKET_TIMEOUT_MS = 30_000;
@ApiTags('Upload')
@Controller('upload')
export class AssetUploadController {
constructor(private service: AssetUploadService) {}
@Post()
@Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
@ApiHeader({
name: ImmichHeader.AssetData,
description: `RFC 9651 structured dictionary containing asset metadata with the following keys:
- device-asset-id (string, required): Unique device asset identifier
- device-id (string, required): Device identifier
- file-created-at (string/date, required): ISO 8601 date string or Unix timestamp
- file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp
- filename (string, required): Original filename
- is-favorite (boolean, optional): Favorite status
- live-photo-video-id (string, optional): Live photo ID for assets from iOS devices
- icloud-id (string, optional): iCloud identifier for assets from iOS devices`,
required: true,
example:
'device-asset-id="abc123", device-id="phone1", filename="photo.jpg", file-created-at="2024-01-01T00:00:00Z", file-modified-at="2024-01-01T00:00:00Z"',
})
@ApiHeader({
name: Header.ReprDigest,
description:
'RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.',
required: true,
})
@ApiHeader({ ...apiInteropVersion, required: false })
@ApiHeader({ ...apiUploadComplete, required: false })
@ApiHeader(apiContentLength)
@ApiOkResponse({ type: UploadOkDto })
startUpload(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response): Promise<void> {
res.setTimeout(SOCKET_TIMEOUT_MS);
return this.service.startUpload(auth, req, res, validateSyncOrReject(StartUploadDto, req.headers));
}
@Patch(':id')
@Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
@ApiHeader({
name: Header.UploadOffset,
description:
'Non-negative byte offset indicating the starting position of the data in the request body within the entire file.',
required: true,
})
@ApiHeader(apiInteropVersion)
@ApiHeader(apiUploadComplete)
@ApiHeader(apiContentLength)
@ApiOkResponse({ type: UploadOkDto })
resumeUpload(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response, @Param() { id }: UUIDParamDto) {
res.setTimeout(SOCKET_TIMEOUT_MS);
return this.service.resumeUpload(auth, req, res, id, validateSyncOrReject(ResumeUploadDto, req.headers));
}
@Delete(':id')
@Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
cancelUpload(@Auth() auth: AuthDto, @Res() res: Response, @Param() { id }: UUIDParamDto) {
res.setTimeout(SOCKET_TIMEOUT_MS);
return this.service.cancelUpload(auth, id, res);
}
@Head(':id')
@Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
@ApiHeader(apiInteropVersion)
getUploadStatus(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response, @Param() { id }: UUIDParamDto) {
res.setTimeout(SOCKET_TIMEOUT_MS);
return this.service.getUploadStatus(auth, res, id, validateSyncOrReject(GetUploadStatusDto, req.headers));
}
@Options()
@HttpCode(HttpStatus.NO_CONTENT)
getUploadOptions(@Res() res: Response) {
return this.service.getUploadOptions(res);
}
}

View File

@@ -3,7 +3,6 @@ import { AlbumController } from 'src/controllers/album.controller';
import { ApiKeyController } from 'src/controllers/api-key.controller';
import { AppController } from 'src/controllers/app.controller';
import { AssetMediaController } from 'src/controllers/asset-media.controller';
import { AssetUploadController } from 'src/controllers/asset-upload.controller';
import { AssetController } from 'src/controllers/asset.controller';
import { AuthAdminController } from 'src/controllers/auth-admin.controller';
import { AuthController } from 'src/controllers/auth.controller';
@@ -41,7 +40,6 @@ export const controllers = [
AppController,
AssetController,
AssetMediaController,
AssetUploadController,
AuthController,
AuthAdminController,
DownloadController,

View File

@@ -1,196 +0,0 @@
import { BadRequestException } from '@nestjs/common';
import { ApiProperty } from '@nestjs/swagger';
import { Expose, plainToInstance, Transform, Type } from 'class-transformer';
import { Equals, IsBoolean, IsInt, IsNotEmpty, IsString, Min, ValidateIf, ValidateNested } from 'class-validator';
import { ImmichHeader } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate } from 'src/validation';
import { parseDictionary } from 'structured-headers';
export enum Header {
ContentLength = 'content-length',
ContentType = 'content-type',
InteropVersion = 'upload-draft-interop-version',
ReprDigest = 'repr-digest',
UploadComplete = 'upload-complete',
UploadIncomplete = 'upload-incomplete',
UploadLength = 'upload-length',
UploadOffset = 'upload-offset',
}
export class UploadAssetDataDto {
@IsNotEmpty()
@IsString()
deviceAssetId!: string;
@IsNotEmpty()
@IsString()
deviceId!: string;
@ValidateDate()
fileCreatedAt!: Date;
@ValidateDate()
fileModifiedAt!: Date;
@IsString()
@IsNotEmpty()
filename!: string;
@ValidateBoolean({ optional: true })
isFavorite?: boolean;
@Optional()
@IsString()
@IsNotEmpty()
livePhotoVideoId?: string;
@Optional()
@IsString()
@IsNotEmpty()
iCloudId!: string;
}
export class BaseUploadHeadersDto {
@Expose({ name: Header.ContentLength })
@Min(0)
@IsInt()
@Type(() => Number)
contentLength!: number;
}
export class StartUploadDto extends BaseUploadHeadersDto {
@Expose({ name: Header.InteropVersion })
@Optional()
@Min(3)
@IsInt()
@Type(() => Number)
version?: number;
@Expose({ name: ImmichHeader.AssetData })
@ValidateNested()
@Transform(({ value }) => {
if (!value) {
throw new BadRequestException(`${ImmichHeader.AssetData} header is required`);
}
try {
const dict = parseDictionary(value);
return plainToInstance(UploadAssetDataDto, {
deviceAssetId: dict.get('device-asset-id')?.[0],
deviceId: dict.get('device-id')?.[0],
filename: dict.get('filename')?.[0],
duration: dict.get('duration')?.[0],
fileCreatedAt: dict.get('file-created-at')?.[0],
fileModifiedAt: dict.get('file-modified-at')?.[0],
isFavorite: dict.get('is-favorite')?.[0],
livePhotoVideoId: dict.get('live-photo-video-id')?.[0],
iCloudId: dict.get('icloud-id')?.[0],
});
} catch {
throw new BadRequestException(`${ImmichHeader.AssetData} must be a valid structured dictionary`);
}
})
assetData!: UploadAssetDataDto;
@Expose({ name: Header.ReprDigest })
@Transform(({ value }) => {
if (!value) {
throw new BadRequestException(`Missing ${Header.ReprDigest} header`);
}
const checksum = parseDictionary(value).get('sha')?.[0];
if (checksum instanceof ArrayBuffer && checksum.byteLength === 20) {
return Buffer.from(checksum);
}
throw new BadRequestException(`Invalid ${Header.ReprDigest} header`);
})
checksum!: Buffer;
@Expose()
@Min(1)
@IsInt()
@Transform(({ obj }) => {
const uploadLength = obj[Header.UploadLength];
if (uploadLength != undefined) {
return Number(uploadLength);
}
const contentLength = obj[Header.ContentLength];
if (contentLength && isUploadComplete(obj) !== false) {
return Number(contentLength);
}
throw new BadRequestException(`Missing ${Header.UploadLength} header`);
})
uploadLength!: number;
@Expose()
@Transform(({ obj }) => isUploadComplete(obj))
uploadComplete?: boolean;
}
export class ResumeUploadDto extends BaseUploadHeadersDto {
@Expose({ name: Header.InteropVersion })
@Min(3)
@IsInt()
@Type(() => Number)
version!: number;
@Expose({ name: Header.ContentType })
@ValidateIf((o) => o.version && o.version >= 6)
@Equals('application/partial-upload')
contentType!: string;
@Expose({ name: Header.UploadLength })
@Min(1)
@IsInt()
@Type(() => Number)
@Optional()
uploadLength?: number;
@Expose({ name: Header.UploadOffset })
@Min(0)
@IsInt()
@Type(() => Number)
uploadOffset!: number;
@Expose()
@IsBoolean()
@Transform(({ obj }) => isUploadComplete(obj))
uploadComplete!: boolean;
}
export class GetUploadStatusDto {
@Expose({ name: Header.InteropVersion })
@Min(3)
@IsInt()
@Type(() => Number)
version!: number;
}
export class UploadOkDto {
@ApiProperty()
id!: string;
}
const STRUCTURED_TRUE = '?1';
const STRUCTURED_FALSE = '?0';
function isUploadComplete(obj: any) {
const uploadComplete = obj[Header.UploadComplete];
if (uploadComplete === STRUCTURED_TRUE) {
return true;
} else if (uploadComplete === STRUCTURED_FALSE) {
return false;
} else if (uploadComplete !== undefined) {
throw new BadRequestException('upload-complete must be a structured boolean value');
}
const uploadIncomplete = obj[Header.UploadIncomplete];
if (uploadIncomplete === STRUCTURED_TRUE) {
return false;
} else if (uploadIncomplete === STRUCTURED_FALSE) {
return true;
} else if (uploadComplete !== undefined) {
throw new BadRequestException('upload-incomplete must be a structured boolean value');
}
}

View File

@@ -55,23 +55,11 @@ export class DatabaseBackupConfig {
keepLastAmount!: number;
}
export class UploadBackupConfig {
@IsInt()
@IsPositive()
@IsNotEmpty()
maxAgeHours!: number;
}
export class SystemConfigBackupsDto {
@Type(() => DatabaseBackupConfig)
@ValidateNested()
@IsObject()
database!: DatabaseBackupConfig;
@Type(() => UploadBackupConfig)
@ValidateNested()
@IsObject()
upload!: UploadBackupConfig;
}
export class SystemConfigFFmpegDto {
@@ -356,9 +344,6 @@ class SystemConfigNightlyTasksDto {
@ValidateBoolean()
syncQuotaUsage!: boolean;
@ValidateBoolean()
removeStaleUploads!: boolean;
}
class SystemConfigOAuthDto {

View File

@@ -20,7 +20,6 @@ export enum ImmichHeader {
SharedLinkSlug = 'x-immich-share-slug',
Checksum = 'x-immich-checksum',
Cid = 'x-immich-cid',
AssetData = 'x-immich-asset-data',
}
export enum ImmichQuery {
@@ -304,7 +303,6 @@ export enum AssetStatus {
Active = 'active',
Trashed = 'trashed',
Deleted = 'deleted',
Partial = 'partial',
}
export enum SourceType {
@@ -495,7 +493,6 @@ export enum BootstrapEventPriority {
JobService = -190,
// Initialise config after other bootstrap services, stop other services from using config on bootstrap
SystemConfig = 100,
UploadService = 200,
}
export enum QueueName {
@@ -531,8 +528,6 @@ export enum JobName {
AssetFileMigration = 'AssetFileMigration',
AssetGenerateThumbnailsQueueAll = 'AssetGenerateThumbnailsQueueAll',
AssetGenerateThumbnails = 'AssetGenerateThumbnails',
PartialAssetCleanup = 'PartialAssetCleanup',
PartialAssetCleanupQueueAll = 'PartialAssetCleanupQueueAll',
AuditLogCleanup = 'AuditLogCleanup',
AuditTableCleanup = 'AuditTableCleanup',

View File

@@ -14,7 +14,6 @@ from
left join "smart_search" on "asset"."id" = "smart_search"."assetId"
where
"asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit
$2
@@ -41,7 +40,6 @@ from
"asset"
where
"asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit
$2
@@ -54,7 +52,6 @@ from
"asset"
where
"asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit
$2
@@ -81,8 +78,7 @@ from
"asset"
inner join "asset_job_status" on "asset_job_status"."assetId" = "asset"."id"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
"asset"."deletedAt" is null
and "asset"."visibility" != $1
and (
"asset_job_status"."previewAt" is null
@@ -114,7 +110,6 @@ from
"asset"
where
"asset"."id" = $1
and "asset"."status" != 'partial'
-- AssetJobRepository.getForGenerateThumbnailJob
select
@@ -146,7 +141,6 @@ from
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."id" = $1
and "asset"."status" != 'partial'
-- AssetJobRepository.getForMetadataExtraction
select
@@ -184,7 +178,6 @@ from
"asset"
where
"asset"."id" = $1
and "asset"."status" != 'partial'
-- AssetJobRepository.getAlbumThumbnailFiles
select
@@ -205,8 +198,7 @@ from
inner join "smart_search" on "asset"."id" = "smart_search"."assetId"
inner join "asset_job_status" as "job_status" on "job_status"."assetId" = "asset"."id"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
"asset"."deletedAt" is null
and "asset"."visibility" in ('archive', 'timeline')
and "job_status"."duplicatesDetectedAt" is null
@@ -218,7 +210,6 @@ from
inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id"
where
"asset"."visibility" != $1
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "job_status"."previewAt" is not null
and not exists (
@@ -253,7 +244,6 @@ from
"asset"
where
"asset"."id" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.getForDetectFacesJob
select
@@ -294,7 +284,6 @@ from
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."id" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.getForSyncAssets
select
@@ -379,7 +368,6 @@ from
) as "stacked_assets" on "stack"."id" is not null
where
"asset"."id" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForVideoConversion
select
@@ -393,7 +381,6 @@ where
or "asset"."encodedVideoPath" = $2
)
and "asset"."visibility" != $3
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
-- AssetJobRepository.getForVideoConversion
@@ -407,7 +394,6 @@ from
where
"asset"."id" = $1
and "asset"."type" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForMetadataExtraction
select
@@ -420,7 +406,6 @@ where
"asset_job_status"."metadataExtractedAt" is null
or "asset_job_status"."assetId" is null
)
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
-- AssetJobRepository.getForStorageTemplateJob
@@ -441,8 +426,7 @@ from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
"asset"."deletedAt" is null
and "asset"."id" = $1
-- AssetJobRepository.streamForStorageTemplateJob
@@ -463,8 +447,7 @@ from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
"asset"."deletedAt" is null
-- AssetJobRepository.streamForDeletedJob
select
@@ -474,7 +457,6 @@ from
"asset"
where
"asset"."deletedAt" <= $1
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForSidecar
select
@@ -487,7 +469,6 @@ where
or "asset"."sidecarPath" is null
)
and "asset"."visibility" != $2
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForDetectFacesJob
select
@@ -497,10 +478,8 @@ from
inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id"
where
"asset"."visibility" != $1
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "job_status"."previewAt" is not null
and "asset"."status" != 'partial'
order by
"asset"."fileCreatedAt" desc
@@ -510,14 +489,4 @@ select
from
"asset"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
-- AssetJobRepository.streamForPartialAssetCleanupJob
select
"id"
from
"asset"
where
"asset"."status" = 'partial'
and "asset"."createdAt" < $1
"asset"."deletedAt" is null

View File

@@ -46,68 +46,6 @@ where
"assetId" = $1
and "key" = $2
-- AssetRepository.getCompletionMetadata
select
"originalPath" as "path",
"status",
"fileModifiedAt",
"createdAt",
"checksum",
"fileSizeInByte" as "size"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"id" = $1
and "ownerId" = $2
-- AssetRepository.setComplete
update "asset" as "complete_asset"
set
"status" = 'active',
"visibility" = case
when (
"complete_asset"."type" = 'VIDEO'
and exists (
select
from
"asset"
where
"complete_asset"."id" = "asset"."livePhotoVideoId"
)
) then 'hidden'::asset_visibility_enum
else 'timeline'::asset_visibility_enum
end
where
"id" = $1
and "status" = 'partial'
-- AssetRepository.removeAndDecrementQuota
with
"asset_exif" as (
select
"fileSizeInByte"
from
"asset_exif"
where
"assetId" = $1
),
"asset" as (
delete from "asset"
where
"id" = $2
returning
"ownerId"
)
update "user"
set
"quotaUsageInBytes" = "quotaUsageInBytes" - "fileSizeInByte"
from
"asset_exif",
"asset"
where
"user"."id" = "asset"."ownerId"
-- AssetRepository.getByDayOfYear
with
"res" as (
@@ -320,9 +258,7 @@ where
-- AssetRepository.getUploadAssetIdByChecksum
select
"id",
"status",
"createdAt"
"id"
from
"asset"
where

View File

@@ -1,10 +1,10 @@
import { Injectable } from '@nestjs/common';
import { Kysely, sql } from 'kysely';
import { Kysely } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { Asset, columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { AssetFileType, AssetType, AssetVisibility } from 'src/enum';
import { DB } from 'src/schema';
import { StorageAsset } from 'src/types';
import {
@@ -28,7 +28,6 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.leftJoin('smart_search', 'asset.id', 'smart_search.assetId')
.select(['id', 'type', 'ownerId', 'duplicateId', 'stackId', 'visibility', 'smart_search.embedding'])
.limit(1)
@@ -40,7 +39,6 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.select(['id', 'sidecarPath', 'originalPath'])
.select((eb) =>
jsonArrayFrom(
@@ -60,7 +58,6 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.select(['id', 'sidecarPath', 'originalPath'])
.limit(1)
.executeTakeFirst();
@@ -72,7 +69,6 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['asset.id', 'asset.thumbhash'])
.select(withFiles)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.$if(!force, (qb) =>
@@ -97,7 +93,6 @@ export class AssetJobRepository {
.select(['asset.id', 'asset.ownerId', 'asset.encodedVideoPath'])
.select(withFiles)
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -117,7 +112,6 @@ export class AssetJobRepository {
.select(withFiles)
.$call(withExifInner)
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -128,7 +122,6 @@ export class AssetJobRepository {
.select(columns.asset)
.select(withFaces)
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -146,7 +139,6 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.innerJoin('asset_job_status as job_status', 'assetId', 'asset.id')
.where('job_status.previewAt', 'is not', null);
@@ -157,7 +149,6 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.select(['asset.id'])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.innerJoin('smart_search', 'asset.id', 'smart_search.assetId')
.$call(withDefaultVisibility)
@@ -186,7 +177,6 @@ export class AssetJobRepository {
.select(['asset.id', 'asset.visibility'])
.select((eb) => withFiles(eb, AssetFileType.Preview))
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -199,7 +189,6 @@ export class AssetJobRepository {
.select((eb) => withFaces(eb, true))
.select((eb) => withFiles(eb, AssetFileType.Preview))
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -252,7 +241,6 @@ export class AssetJobRepository {
)
.select((eb) => toJson(eb, 'stacked_assets').as('stack'))
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -267,7 +255,6 @@ export class AssetJobRepository {
.where((eb) => eb.or([eb('asset.encodedVideoPath', 'is', null), eb('asset.encodedVideoPath', '=', '')]))
.where('asset.visibility', '!=', AssetVisibility.Hidden),
)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.stream();
}
@@ -279,7 +266,6 @@ export class AssetJobRepository {
.select(['asset.id', 'asset.ownerId', 'asset.originalPath', 'asset.encodedVideoPath'])
.where('asset.id', '=', id)
.where('asset.type', '=', AssetType.Video)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@@ -295,7 +281,6 @@ export class AssetJobRepository {
eb.or([eb('asset_job_status.metadataExtractedAt', 'is', null), eb('asset_job_status.assetId', 'is', null)]),
),
)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.stream();
}
@@ -318,7 +303,6 @@ export class AssetJobRepository {
'asset_exif.timeZone',
'asset_exif.fileSizeInByte',
])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null);
}
@@ -340,7 +324,6 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['id', 'isOffline'])
.where('asset.deletedAt', '<=', trashedBefore)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.stream();
}
@@ -353,7 +336,6 @@ export class AssetJobRepository {
qb.where((eb) => eb.or([eb('asset.sidecarPath', '=', ''), eb('asset.sidecarPath', 'is', null)])),
)
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.stream();
}
@@ -362,38 +344,12 @@ export class AssetJobRepository {
return this.assetsWithPreviews()
.$if(force === false, (qb) => qb.where('job_status.facesRecognizedAt', 'is', null))
.select(['asset.id'])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.orderBy('asset.fileCreatedAt', 'desc')
.stream();
}
@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForMigrationJob() {
return this.db
.selectFrom('asset')
.select(['id'])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.stream();
}
getForPartialAssetCleanupJob(assetId: string) {
return this.db
.selectFrom('asset')
.innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.select(['originalPath as path', 'fileSizeInByte as size', 'checksum', 'fileModifiedAt'])
.where('id', '=', assetId)
.where('status', '=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForPartialAssetCleanupJob(createdBefore: Date) {
return this.db
.selectFrom('asset')
.select(['id'])
.where('asset.status', '=', sql.lit(AssetStatus.Partial))
.where('asset.createdAt', '<', createdBefore)
.stream();
return this.db.selectFrom('asset').select(['id']).where('asset.deletedAt', 'is', null).stream();
}
}

View File

@@ -253,96 +253,6 @@ export class AssetRepository {
return this.db.insertInto('asset').values(asset).returningAll().executeTakeFirstOrThrow();
}
createWithMetadata(asset: Insertable<AssetTable> & { id: string }, size: number, metadata?: AssetMetadataItem[]) {
let query = this.db;
if (asset.livePhotoVideoId) {
(query as any) = query.with('motion_asset', (qb) =>
qb
.updateTable('asset')
.set({ visibility: AssetVisibility.Hidden })
.where('id', '=', asset.livePhotoVideoId!)
.where('type', '=', sql.lit(AssetType.Video))
.where('ownerId', '=', asset.ownerId)
.returning('id'),
);
}
(query as any) = query
.with('asset', (qb) =>
qb
.insertInto('asset')
.values(
asset.livePhotoVideoId ? { ...asset, livePhotoVideoId: sql<string>`(select id from motion_asset)` } : asset,
)
.returning(['id', 'ownerId']),
)
.with('exif', (qb) =>
qb
.insertInto('asset_exif')
.columns(['assetId', 'fileSizeInByte'])
.expression((eb) => eb.selectFrom('asset').select(['asset.id', eb.val(size).as('fileSizeInByte')])),
);
if (metadata && metadata.length > 0) {
(query as any) = query.with('metadata', (qb) =>
qb.insertInto('asset_metadata').values(metadata.map(({ key, value }) => ({ assetId: asset.id, key, value }))),
);
}
return query
.updateTable('user')
.from('asset')
.set({ quotaUsageInBytes: sql`"quotaUsageInBytes" + ${size}` })
.whereRef('user.id', '=', 'asset.ownerId')
.execute();
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
getCompletionMetadata(assetId: string, ownerId: string) {
return this.db
.selectFrom('asset')
.innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.select(['originalPath as path', 'status', 'fileModifiedAt', 'createdAt', 'checksum', 'fileSizeInByte as size'])
.where('id', '=', assetId)
.where('ownerId', '=', ownerId)
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
async setComplete(assetId: string) {
await this.db
.updateTable('asset as complete_asset')
.set((eb) => ({
status: sql.lit(AssetStatus.Active),
visibility: eb
.case()
.when(
eb.and([
eb('complete_asset.type', '=', sql.lit(AssetType.Video)),
eb.exists(eb.selectFrom('asset').whereRef('complete_asset.id', '=', 'asset.livePhotoVideoId')),
]),
)
.then(sql<AssetVisibility>`'hidden'::asset_visibility_enum`)
.else(sql<AssetVisibility>`'timeline'::asset_visibility_enum`)
.end(),
}))
.where('id', '=', assetId)
.where('status', '=', sql.lit(AssetStatus.Partial))
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
async removeAndDecrementQuota(id: string): Promise<void> {
await this.db
.with('asset_exif', (qb) => qb.selectFrom('asset_exif').where('assetId', '=', id).select('fileSizeInByte'))
.with('asset', (qb) => qb.deleteFrom('asset').where('id', '=', id).returning('ownerId'))
.updateTable('user')
.from(['asset_exif', 'asset'])
.set({ quotaUsageInBytes: sql`"quotaUsageInBytes" - "fileSizeInByte"` })
.whereRef('user.id', '=', 'asset.ownerId')
.execute();
}
createAll(assets: Insertable<AssetTable>[]) {
return this.db.insertInto('asset').values(assets).returningAll().execute();
}
@@ -582,15 +492,17 @@ export class AssetRepository {
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.BUFFER] })
getUploadAssetIdByChecksum(ownerId: string, checksum: Buffer) {
return this.db
async getUploadAssetIdByChecksum(ownerId: string, checksum: Buffer): Promise<string | undefined> {
const asset = await this.db
.selectFrom('asset')
.select(['id', 'status', 'createdAt'])
.select('id')
.where('ownerId', '=', asUuid(ownerId))
.where('checksum', '=', checksum)
.where('libraryId', 'is', null)
.limit(1)
.executeTakeFirst();
return asset?.id;
}
findLivePhotoMatch(options: LivePhotoSearchOptions) {

View File

@@ -451,20 +451,6 @@ export class DatabaseRepository {
return res as R;
}
async withUuidLock<R>(uuid: string, callback: () => Promise<R>): Promise<R> {
let res;
await this.db.connection().execute(async (connection) => {
try {
await this.acquireUuidLock(uuid, connection);
res = await callback();
} finally {
await this.releaseUuidLock(uuid, connection);
}
});
return res as R;
}
tryLock(lock: DatabaseLock): Promise<boolean> {
return this.db.connection().execute(async (connection) => this.acquireTryLock(lock, connection));
}
@@ -481,10 +467,6 @@ export class DatabaseRepository {
await sql`SELECT pg_advisory_lock(${lock})`.execute(connection);
}
private async acquireUuidLock(uuid: string, connection: Kysely<DB>): Promise<void> {
await sql`SELECT pg_advisory_lock(uuid_hash_extended(${uuid}, 0))`.execute(connection);
}
private async acquireTryLock(lock: DatabaseLock, connection: Kysely<DB>): Promise<boolean> {
const { rows } = await sql<{
pg_try_advisory_lock: boolean;
@@ -495,8 +477,4 @@ export class DatabaseRepository {
private async releaseLock(lock: DatabaseLock, connection: Kysely<DB>): Promise<void> {
await sql`SELECT pg_advisory_unlock(${lock})`.execute(connection);
}
private async releaseUuidLock(uuid: string, connection: Kysely<DB>): Promise<void> {
await sql`SELECT pg_advisory_unlock(uuid_hash_extended(${uuid}, 0))`.execute(connection);
}
}

View File

@@ -80,9 +80,6 @@ type EventMap = {
// stack bulk events
StackDeleteAll: [{ stackIds: string[]; userId: string }];
// upload events
UploadAbort: [{ assetId: string; abortTime: Date }];
// user events
UserSignup: [{ notify: boolean; id: string; password?: string }];
@@ -90,7 +87,7 @@ type EventMap = {
WebsocketConnect: [{ userId: string }];
};
export const serverEvents = ['ConfigUpdate', 'UploadAbort'] as const;
export const serverEvents = ['ConfigUpdate'] as const;
export type ServerEvents = (typeof serverEvents)[number];
export type EmitEvent = keyof EventMap;

View File

@@ -62,11 +62,7 @@ export class StorageRepository {
}
createWriteStream(filepath: string): Writable {
return createWriteStream(filepath, { flags: 'w', highWaterMark: 1024 * 1024 });
}
createOrAppendWriteStream(filepath: string): Writable {
return createWriteStream(filepath, { flags: 'a', highWaterMark: 1024 * 1024 });
return createWriteStream(filepath, { flags: 'w' });
}
createOrOverwriteFile(filepath: string, buffer: Buffer) {
@@ -160,13 +156,10 @@ export class StorageRepository {
}
}
mkdir(filepath: string): Promise<string | undefined> {
return fs.mkdir(filepath, { recursive: true });
}
mkdirSync(filepath: string): void {
// does not throw an error if the folder already exists
mkdirSync(filepath, { recursive: true });
if (!existsSync(filepath)) {
mkdirSync(filepath, { recursive: true });
}
}
existsSync(filepath: string) {

View File

@@ -1,9 +0,0 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`ALTER TYPE "assets_status_enum" ADD VALUE IF NOT EXISTS 'partial'`.execute(db);
}
export async function down(): Promise<void> {
// Cannot remove enum values in PostgreSQL
}

View File

@@ -215,11 +215,7 @@ describe(AssetMediaService.name, () => {
});
it('should find an existing asset', async () => {
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: 'asset-id',
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue('asset-id');
await expect(sut.getUploadAssetIdByChecksum(authStub.admin, file1.toString('hex'))).resolves.toEqual({
id: 'asset-id',
status: AssetMediaStatus.DUPLICATE,
@@ -228,11 +224,7 @@ describe(AssetMediaService.name, () => {
});
it('should find an existing asset by base64', async () => {
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: 'asset-id',
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue('asset-id');
await expect(sut.getUploadAssetIdByChecksum(authStub.admin, file1.toString('base64'))).resolves.toEqual({
id: 'asset-id',
status: AssetMediaStatus.DUPLICATE,
@@ -386,11 +378,7 @@ describe(AssetMediaService.name, () => {
(error as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;
mocks.asset.create.mockRejectedValue(error);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: assetEntity.id,
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(assetEntity.id);
await expect(sut.uploadAsset(authStub.user1, createDto, file)).resolves.toEqual({
id: 'id_1',
@@ -815,11 +803,7 @@ describe(AssetMediaService.name, () => {
mocks.asset.update.mockRejectedValue(error);
mocks.asset.getById.mockResolvedValueOnce(sidecarAsset);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: sidecarAsset.id,
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(sidecarAsset.id);
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([sidecarAsset.id]));
// this is the original file size
mocks.storage.stat.mockResolvedValue({ size: 0 } as Stats);

View File

@@ -43,12 +43,12 @@ export class AssetMediaService extends BaseService {
return;
}
const asset = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, fromChecksum(checksum));
if (!asset) {
const assetId = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, fromChecksum(checksum));
if (!assetId) {
return;
}
return { id: asset.id, status: AssetMediaStatus.DUPLICATE };
return { id: assetId, status: AssetMediaStatus.DUPLICATE };
}
canUploadFile({ auth, fieldName, file }: UploadRequest): true {
@@ -165,10 +165,6 @@ export class AssetMediaService extends BaseService {
throw new Error('Asset not found');
}
if (asset.status === AssetStatus.Partial) {
throw new BadRequestException('Cannot replace a partial asset');
}
this.requireQuota(auth, file.size);
await this.replaceFileData(asset.id, dto, file, sidecarFile?.originalPath);
@@ -317,12 +313,12 @@ export class AssetMediaService extends BaseService {
// handle duplicates with a success response
if (isAssetChecksumConstraint(error)) {
const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, file.checksum);
if (!duplicate) {
const duplicateId = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, file.checksum);
if (!duplicateId) {
this.logger.error(`Error locating duplicate for checksum constraint`);
throw new InternalServerErrorException();
}
return { status: AssetMediaStatus.DUPLICATE, id: duplicate.id };
return { status: AssetMediaStatus.DUPLICATE, id: duplicateId };
}
this.logger.error(`Error uploading file ${error}`, error?.stack);

View File

@@ -1,456 +0,0 @@
import { BadRequestException, InternalServerErrorException } from '@nestjs/common';
import { AssetMetadataKey, AssetStatus, AssetType, AssetVisibility, JobName, JobStatus } from 'src/enum';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
import { authStub } from 'test/fixtures/auth.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';

// Unit tests for AssetUploadService. All repository/storage/job access is mocked
// via ServiceMocks; each test drives one public lifecycle hook (onStart,
// onComplete, onCancel, removeStaleUploads, removeStaleUpload) directly.
describe(AssetUploadService.name, () => {
  let sut: AssetUploadService;
  let mocks: ServiceMocks;

  beforeEach(() => {
    // Fresh service + fresh mocks per test so call counts don't leak between tests.
    ({ sut, mocks } = newTestService(AssetUploadService));
  });

  describe('onStart', () => {
    // Minimal valid StartUploadDto; individual tests spread-and-override fields.
    const mockDto = {
      assetData: {
        filename: 'test.jpg',
        deviceAssetId: 'device-asset-1',
        deviceId: 'device-1',
        fileCreatedAt: new Date('2025-01-01T00:00:00Z'),
        fileModifiedAt: new Date('2025-01-01T12:00:00Z'),
        isFavorite: false,
        iCloudId: '',
      },
      checksum: Buffer.from('checksum'),
      uploadLength: 1024,
      uploadComplete: true,
      contentLength: 1024,
      isComplete: true,
      version: 8,
    };

    it('should create a new asset and return upload metadata', async () => {
      const assetId = factory.uuid();
      // Pin the generated id so the returned path/id can be asserted against it.
      mocks.crypto.randomUUID.mockReturnValue(assetId);
      const result = await sut.onStart(authStub.user1, mockDto);
      expect(result).toEqual({
        id: assetId,
        path: expect.stringContaining(assetId),
        status: AssetStatus.Partial,
        isDuplicate: false,
      });
      // New uploads are created hidden + partial until the byte stream completes.
      expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
        expect.objectContaining({
          id: assetId,
          ownerId: authStub.user1.user.id,
          checksum: mockDto.checksum,
          deviceAssetId: mockDto.assetData.deviceAssetId,
          deviceId: mockDto.assetData.deviceId,
          fileCreatedAt: mockDto.assetData.fileCreatedAt,
          fileModifiedAt: mockDto.assetData.fileModifiedAt,
          type: AssetType.Image,
          isFavorite: false,
          status: AssetStatus.Partial,
          visibility: AssetVisibility.Hidden,
          originalFileName: 'test.jpg',
        }),
        1024,
        undefined,
      );
    });

    it('should determine asset type from filename extension', async () => {
      const videoDto = { ...mockDto, assetData: { ...mockDto.assetData, filename: 'video.mp4' } };
      mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
      await sut.onStart(authStub.user1, videoDto);
      expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
        expect.objectContaining({
          type: AssetType.Video,
        }),
        expect.anything(),
        undefined,
      );
    });

    it('should throw BadRequestException for unsupported file types', async () => {
      const unsupportedDto = { ...mockDto, assetData: { ...mockDto.assetData, filename: 'document.xyz' } };
      await expect(sut.onStart(authStub.user1, unsupportedDto)).rejects.toThrow(BadRequestException);
      await expect(sut.onStart(authStub.user1, unsupportedDto)).rejects.toThrow('unsupported file type');
    });

    it('should validate quota before creating asset', async () => {
      // 1500 used + 1024 requested > 2000 limit -> rejected before any DB write.
      const authWithQuota = {
        ...authStub.user1,
        user: {
          ...authStub.user1.user,
          quotaSizeInBytes: 2000,
          quotaUsageInBytes: 1500,
        },
      };
      await expect(sut.onStart(authWithQuota, mockDto)).rejects.toThrow(BadRequestException);
      await expect(sut.onStart(authWithQuota, mockDto)).rejects.toThrow('Quota has been exceeded');
    });

    it('should allow upload when quota is null (unlimited)', async () => {
      const authWithUnlimitedQuota = {
        ...authStub.user1,
        user: {
          ...authStub.user1.user,
          quotaSizeInBytes: null,
          quotaUsageInBytes: 1000,
        },
      };
      mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
      await expect(sut.onStart(authWithUnlimitedQuota, mockDto)).resolves.toBeDefined();
    });

    it('should allow upload when within quota', async () => {
      const authWithQuota = {
        ...authStub.user1,
        user: {
          ...authStub.user1.user,
          quotaSizeInBytes: 5000,
          quotaUsageInBytes: 1000,
        },
      };
      mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
      const result = await sut.onStart(authWithQuota, mockDto);
      expect(result.isDuplicate).toBe(false);
    });

    it('should handle duplicate detection via checksum constraint', async () => {
      // The unique checksum constraint firing on insert is how duplicates are
      // detected; the service must then resolve the existing asset by checksum.
      const existingAssetId = factory.uuid();
      const checksumError = new Error('duplicate key value violates unique constraint');
      (checksumError as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;
      mocks.asset.createWithMetadata.mockRejectedValue(checksumError);
      mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
        id: existingAssetId,
        status: AssetStatus.Partial,
        createdAt: new Date(),
      });
      const result = await sut.onStart(authStub.user1, mockDto);
      expect(result).toEqual({
        id: existingAssetId,
        path: expect.any(String),
        status: AssetStatus.Partial,
        isDuplicate: true,
      });
      expect(mocks.asset.getUploadAssetIdByChecksum).toHaveBeenCalledWith(authStub.user1.user.id, mockDto.checksum);
    });

    it('should throw InternalServerErrorException if duplicate lookup fails', async () => {
      const checksumError = new Error('duplicate key value violates unique constraint');
      (checksumError as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;
      mocks.asset.createWithMetadata.mockRejectedValue(checksumError);
      // eslint-disable-next-line unicorn/no-useless-undefined
      mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(undefined);
      await expect(sut.onStart(authStub.user1, mockDto)).rejects.toThrow(InternalServerErrorException);
    });

    it('should throw InternalServerErrorException for non-checksum errors', async () => {
      const genericError = new Error('database connection failed');
      mocks.asset.createWithMetadata.mockRejectedValue(genericError);
      await expect(sut.onStart(authStub.user1, mockDto)).rejects.toThrow(InternalServerErrorException);
    });

    it('should include iCloud metadata when provided', async () => {
      const dtoWithICloud = {
        ...mockDto,
        assetData: {
          ...mockDto.assetData,
          iCloudId: 'icloud-123',
        },
      };
      mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
      await sut.onStart(authStub.user1, dtoWithICloud);
      expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(expect.anything(), expect.anything(), [
        { key: AssetMetadataKey.MobileApp, value: { iCloudId: 'icloud-123' } },
      ]);
    });

    it('should set isFavorite when true', async () => {
      const favoriteDto = {
        ...mockDto,
        assetData: {
          ...mockDto.assetData,
          isFavorite: true,
        },
      };
      mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
      await sut.onStart(authStub.user1, favoriteDto);
      expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
        expect.objectContaining({
          isFavorite: true,
        }),
        expect.anything(),
        undefined,
      );
    });
  });

  describe('onComplete', () => {
    const assetId = factory.uuid();
    const path = `/upload/${assetId}/file.jpg`;
    const fileModifiedAt = new Date('2025-01-01T12:00:00Z');

    it('should mark asset as complete and queue metadata extraction job', async () => {
      await sut.onComplete({ id: assetId, path, fileModifiedAt });
      expect(mocks.asset.setComplete).toHaveBeenCalledWith(assetId);
      expect(mocks.job.queue).toHaveBeenCalledWith({
        name: JobName.AssetExtractMetadata,
        data: { id: assetId, source: 'upload' },
      });
    });

    it('should update file modification time', async () => {
      await sut.onComplete({ id: assetId, path, fileModifiedAt });
      expect(mocks.storage.utimes).toHaveBeenCalledWith(path, expect.any(Date), fileModifiedAt);
    });

    it('should handle utimes failure gracefully', async () => {
      // utimes is best-effort; completion and job queueing must still happen.
      mocks.storage.utimes.mockRejectedValue(new Error('Permission denied'));
      await expect(sut.onComplete({ id: assetId, path, fileModifiedAt })).resolves.toBeUndefined();
      // Should still complete asset and queue job
      expect(mocks.asset.setComplete).toHaveBeenCalled();
      expect(mocks.job.queue).toHaveBeenCalled();
    });

    it('should retry setComplete on transient failures', async () => {
      mocks.asset.setComplete
        .mockRejectedValueOnce(new Error('Transient error'))
        .mockRejectedValueOnce(new Error('Transient error'))
        .mockResolvedValue();
      await sut.onComplete({ id: assetId, path, fileModifiedAt });
      // 2 failures + 1 success = 3 attempts (withRetry default of 2 retries).
      expect(mocks.asset.setComplete).toHaveBeenCalledTimes(3);
    });

    it('should retry job queueing on transient failures', async () => {
      mocks.job.queue.mockRejectedValueOnce(new Error('Transient error')).mockResolvedValue();
      await sut.onComplete({ id: assetId, path, fileModifiedAt });
      expect(mocks.job.queue).toHaveBeenCalledTimes(2);
    });
  });

  describe('onCancel', () => {
    const assetId = factory.uuid();
    const path = `/upload/${assetId}/file.jpg`;

    it('should delete file and remove asset record', async () => {
      await sut.onCancel(assetId, path);
      expect(mocks.storage.unlink).toHaveBeenCalledWith(path);
      expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
    });

    it('should retry unlink on transient failures', async () => {
      mocks.storage.unlink.mockRejectedValueOnce(new Error('Transient error')).mockResolvedValue();
      await sut.onCancel(assetId, path);
      expect(mocks.storage.unlink).toHaveBeenCalledTimes(2);
    });

    it('should retry removeAndDecrementQuota on transient failures', async () => {
      mocks.asset.removeAndDecrementQuota.mockRejectedValueOnce(new Error('Transient error')).mockResolvedValue();
      await sut.onCancel(assetId, path);
      expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledTimes(2);
    });
  });

  describe('removeStaleUploads', () => {
    it('should queue cleanup jobs for stale partial assets', async () => {
      const staleAssets = [{ id: factory.uuid() }, { id: factory.uuid() }, { id: factory.uuid() }];
      // The repository exposes an async stream; emulate it with an async generator.
      mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue(
        // eslint-disable-next-line @typescript-eslint/require-await
        (async function* () {
          for (const asset of staleAssets) {
            yield asset;
          }
        })(),
      );
      await sut.removeStaleUploads();
      expect(mocks.assetJob.streamForPartialAssetCleanupJob).toHaveBeenCalledWith(expect.any(Date));
      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        { name: JobName.PartialAssetCleanup, data: staleAssets[0] },
        { name: JobName.PartialAssetCleanup, data: staleAssets[1] },
        { name: JobName.PartialAssetCleanup, data: staleAssets[2] },
      ]);
    });

    it('should batch cleanup jobs', async () => {
      // 1500 assets with a batch size of 1000 -> two queueAll calls.
      const assets = Array.from({ length: 1500 }, () => ({ id: factory.uuid() }));
      mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue(
        // eslint-disable-next-line @typescript-eslint/require-await
        (async function* () {
          for (const asset of assets) {
            yield asset;
          }
        })(),
      );
      await sut.removeStaleUploads();
      // Should be called twice: once for 1000, once for 500
      expect(mocks.job.queueAll).toHaveBeenCalledTimes(2);
    });

    it('should handle empty stream', async () => {
      mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue((async function* () {})());
      await sut.removeStaleUploads();
      expect(mocks.job.queueAll).toHaveBeenCalledWith([]);
    });
  });

  describe('removeStaleUpload', () => {
    const assetId = factory.uuid();
    const path = `/upload/${assetId}/file.jpg`;

    it('should skip if asset not found', async () => {
      // eslint-disable-next-line unicorn/no-useless-undefined
      mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue(undefined);
      const result = await sut.removeStaleUpload({ id: assetId });
      expect(result).toBe(JobStatus.Skipped);
      expect(mocks.storage.stat).not.toHaveBeenCalled();
    });

    it('should complete asset if file matches expected state', async () => {
      // Size and checksum both match -> the upload actually finished, so the
      // job salvages the asset instead of deleting it.
      const checksum = Buffer.from('checksum');
      const fileModifiedAt = new Date();
      mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
        path,
        checksum,
        fileModifiedAt,
        size: 1024,
      });
      mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
      mocks.crypto.hashFile.mockResolvedValue(checksum);
      const result = await sut.removeStaleUpload({ id: assetId });
      expect(result).toBe(JobStatus.Success);
      expect(mocks.asset.setComplete).toHaveBeenCalledWith(assetId);
      expect(mocks.storage.unlink).not.toHaveBeenCalled();
    });

    it('should cancel asset if file size does not match', async () => {
      mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
        path,
        checksum: Buffer.from('checksum'),
        fileModifiedAt: new Date(),
        size: 1024,
      });
      mocks.storage.stat.mockResolvedValue({ size: 512 } as any);
      const result = await sut.removeStaleUpload({ id: assetId });
      expect(result).toBe(JobStatus.Success);
      expect(mocks.storage.unlink).toHaveBeenCalledWith(path);
      expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
    });

    it('should cancel asset if checksum does not match', async () => {
      mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
        path,
        checksum: Buffer.from('expected-checksum'),
        fileModifiedAt: new Date(),
        size: 1024,
      });
      mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
      mocks.crypto.hashFile.mockResolvedValue(Buffer.from('actual-checksum'));
      const result = await sut.removeStaleUpload({ id: assetId });
      expect(result).toBe(JobStatus.Success);
      expect(mocks.storage.unlink).toHaveBeenCalledWith(path);
      expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
    });

    it('should cancel asset if file does not exist', async () => {
      mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
        path,
        checksum: Buffer.from('checksum'),
        fileModifiedAt: new Date(),
        size: 1024,
      });
      const error = new Error('File not found') as NodeJS.ErrnoException;
      error.code = 'ENOENT';
      mocks.storage.stat.mockRejectedValue(error);
      const result = await sut.removeStaleUpload({ id: assetId });
      expect(result).toBe(JobStatus.Success);
      expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
    });

    it('should cancel asset if stat fails with permission error', async () => {
      mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
        path,
        checksum: Buffer.from('checksum'),
        fileModifiedAt: new Date(),
        size: 1024,
      });
      const error = new Error('Permission denied') as NodeJS.ErrnoException;
      error.code = 'EACCES';
      mocks.storage.stat.mockRejectedValue(error);
      const result = await sut.removeStaleUpload({ id: assetId });
      expect(result).toBe(JobStatus.Success);
      expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
    });
  });
});

View File

@@ -1,454 +0,0 @@
import { BadRequestException, Injectable, InternalServerErrorException } from '@nestjs/common';
import { Response } from 'express';
import { DateTime } from 'luxon';
import { createHash } from 'node:crypto';
import { dirname, extname, join } from 'node:path';
import { Readable, Writable } from 'node:stream';
import { SystemConfig } from 'src/config';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto } from 'src/dtos/asset-upload.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
  AssetMetadataKey,
  AssetStatus,
  AssetType,
  AssetVisibility,
  ImmichWorker,
  JobName,
  JobStatus,
  QueueName,
  StorageFolder,
} from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { mimeTypes } from 'src/utils/mime-types';
import { withRetry } from 'src/utils/misc';

// Highest draft-interop version of the resumable-upload protocol this server
// speaks (see setCompleteHeader for how older versions change header names).
export const MAX_RUFH_INTEROP_VERSION = 8;

/**
 * Implements resumable asset uploads: start/resume/cancel/status endpoints plus
 * background cleanup of stale partial uploads. Per-asset serialization is done
 * with a UUID-keyed database lock (withUuidLock); concurrent requests for the
 * same asset are proactively aborted via the 'UploadAbort' server event.
 */
@Injectable()
export class AssetUploadService extends BaseService {
  // This is used to proactively abort previous requests for the same asset
  // when a new one arrives. The previous request still holds the asset lock
  // and will prevent the new request from proceeding until the previous one
  // times out. As normal client behavior will not have concurrent requests,
  // we can assume the previous request has already failed on the client end.
  private activeRequests = new Map<string, { req: Readable; startTime: Date }>();

  /**
   * Handles an abort broadcast (local or from another API worker). Returns
   * true when this worker owned an active request for the asset — the caller
   * uses that to decide whether to fan the event out to other workers.
   */
  @OnEvent({ name: 'UploadAbort', workers: [ImmichWorker.Api], server: true })
  onUploadAbort({ assetId, abortTime }: ArgOf<'UploadAbort'>) {
    const entry = this.activeRequests.get(assetId);
    if (!entry) {
      return false;
    }
    // Only destroy requests that started before the abort was issued; a newer
    // request for the same asset must not be killed by a stale abort.
    if (abortTime > entry.startTime) {
      entry.req.destroy();
      this.activeRequests.delete(assetId);
    }
    return true;
  }

  /**
   * Creates the asset record and streams the (possibly partial) upload body to
   * disk. Handles three shapes of request: one-shot conventional upload,
   * resumable first request, and a duplicate of an existing asset.
   */
  async startUpload(auth: AuthDto, req: Readable, res: Response, dto: StartUploadDto): Promise<void> {
    this.logger.verboseFn(() => `Starting upload: ${JSON.stringify(dto)}`);
    const { uploadComplete, assetData, uploadLength, contentLength, version } = dto;
    // Absent uploadComplete means a conventional (non-resumable) full upload.
    const isComplete = uploadComplete !== false;
    const isResumable = version && uploadComplete !== undefined;
    const { backup } = await this.getConfig({ withCache: true });
    const asset = await this.onStart(auth, dto);
    if (asset.isDuplicate) {
      if (asset.status !== AssetStatus.Partial) {
        return this.sendAlreadyCompleted(res);
      }
      const location = `/api/upload/${asset.id}`;
      if (isResumable) {
        this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
        // this is a 5xx to indicate the client should do offset retrieval and resume
        res.status(500).send('Incomplete asset already exists');
        return;
      }
      // Non-resumable duplicate of a partial asset falls through and overwrites it.
    }
    if (isComplete && uploadLength !== contentLength) {
      return this.sendInconsistentLength(res);
    }
    const location = `/api/upload/${asset.id}`;
    if (isResumable) {
      this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
    }
    this.addRequest(asset.id, req);
    await this.databaseRepository.withUuidLock(asset.id, async () => {
      // conventional upload, check status again with lock acquired before overwriting
      if (asset.isDuplicate) {
        const existingAsset = await this.assetRepository.getCompletionMetadata(asset.id, auth.user.id);
        if (existingAsset?.status !== AssetStatus.Partial) {
          return this.sendAlreadyCompleted(res);
        }
      }
      await this.storageRepository.mkdir(dirname(asset.path));
      let checksumBuffer: Buffer | undefined;
      // Duplicates restart from scratch (truncate); new assets append so a
      // resumable first chunk can be continued later.
      const writeStream = asset.isDuplicate
        ? this.storageRepository.createWriteStream(asset.path)
        : this.storageRepository.createOrAppendWriteStream(asset.path);
      this.pipe(req, writeStream, contentLength);
      if (isComplete) {
        // Hash the bytes as they stream by so no second read of the file is needed.
        const hash = createHash('sha1');
        req.on('data', (data: Buffer) => hash.update(data));
        writeStream.on('finish', () => (checksumBuffer = hash.digest()));
      }
      await new Promise((resolve, reject) => writeStream.on('close', resolve).on('error', reject));
      if (isResumable) {
        this.setCompleteHeader(res, version, uploadComplete);
      }
      if (!isComplete) {
        // Partial first chunk accepted; client will PATCH the rest at Location.
        res.status(201).set('Location', location).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
        return;
      }
      // checksumBuffer is set by the 'finish' handler, which has fired by the
      // time 'close' resolves above.
      if (dto.checksum.compare(checksumBuffer!) !== 0) {
        return await this.sendChecksumMismatch(res, asset.id, asset.path);
      }
      await this.onComplete({ id: asset.id, path: asset.path, fileModifiedAt: assetData.fileModifiedAt });
      res.status(200).send({ id: asset.id });
    });
  }

  /**
   * Appends bytes at the given offset to an existing partial upload, verifying
   * offset/length consistency, and finalizes the asset when the client signals
   * completion.
   */
  resumeUpload(auth: AuthDto, req: Readable, res: Response, id: string, dto: ResumeUploadDto): Promise<void> {
    this.logger.verboseFn(() => `Resuming upload for ${id}: ${JSON.stringify(dto)}`);
    const { uploadComplete, uploadLength, uploadOffset, contentLength, version } = dto;
    this.setCompleteHeader(res, version, false);
    this.addRequest(id, req);
    return this.databaseRepository.withUuidLock(id, async () => {
      const completionData = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
      if (!completionData) {
        res.status(404).send('Asset not found');
        return;
      }
      const { fileModifiedAt, path, status, checksum: providedChecksum, size } = completionData;
      if (status !== AssetStatus.Partial) {
        return this.sendAlreadyCompleted(res);
      }
      // Declared total length must not change across requests.
      if (uploadLength && size && size !== uploadLength) {
        return this.sendInconsistentLength(res);
      }
      // The client's claimed offset must match the bytes already on disk.
      const expectedOffset = await this.getCurrentOffset(path);
      if (expectedOffset !== uploadOffset) {
        return this.sendOffsetMismatch(res, expectedOffset, uploadOffset);
      }
      const newLength = uploadOffset + contentLength;
      if (uploadLength !== undefined && newLength > uploadLength) {
        res.status(400).send('Upload would exceed declared length');
        return;
      }
      // Empty non-final PATCH: pure offset probe, nothing to write.
      if (contentLength === 0 && !uploadComplete) {
        res.status(204).setHeader('Upload-Offset', expectedOffset.toString()).send();
        return;
      }
      const writeStream = this.storageRepository.createOrAppendWriteStream(path);
      this.pipe(req, writeStream, contentLength);
      await new Promise((resolve, reject) => writeStream.on('close', resolve).on('error', reject));
      this.setCompleteHeader(res, version, uploadComplete);
      if (!uploadComplete) {
        try {
          const offset = await this.getCurrentOffset(path);
          res.status(204).setHeader('Upload-Offset', offset.toString()).send();
        } catch {
          this.logger.error(`Failed to get current offset for ${path} after write`);
          res.status(500).send();
        }
        return;
      }
      // Final chunk: verify the whole file against the checksum provided at start.
      const checksum = await this.cryptoRepository.hashFile(path);
      if (providedChecksum.compare(checksum) !== 0) {
        return await this.sendChecksumMismatch(res, id, path);
      }
      await this.onComplete({ id, path, fileModifiedAt });
      res.status(200).send({ id });
    });
  }

  /** Aborts any in-flight request for the asset and removes the partial upload. */
  cancelUpload(auth: AuthDto, assetId: string, res: Response): Promise<void> {
    this.abortExistingRequest(assetId);
    return this.databaseRepository.withUuidLock(assetId, async () => {
      const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
      if (!asset) {
        res.status(404).send('Asset not found');
        return;
      }
      if (asset.status !== AssetStatus.Partial) {
        return this.sendAlreadyCompleted(res);
      }
      await this.onCancel(assetId, asset.path);
      res.status(204).send();
    });
  }

  /** Reports the current byte offset (and completeness) of an upload via headers. */
  async getUploadStatus(auth: AuthDto, res: Response, id: string, { version }: GetUploadStatusDto): Promise<void> {
    this.logger.verboseFn(() => `Getting upload status for ${id} with version ${version}`);
    const { backup } = await this.getConfig({ withCache: true });
    // Abort any writer first so the offset read below reflects settled state.
    this.abortExistingRequest(id);
    return this.databaseRepository.withUuidLock(id, async () => {
      const asset = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
      if (!asset) {
        res.status(404).send('Asset not found');
        return;
      }
      const offset = await this.getCurrentOffset(asset.path);
      this.setCompleteHeader(res, version, asset.status !== AssetStatus.Partial);
      res
        .status(204)
        .setHeader('Upload-Offset', offset.toString())
        .setHeader('Cache-Control', 'no-store')
        .setHeader('Upload-Limit', this.getUploadLimits(backup))
        .send();
    });
  }

  /** OPTIONS-style capability probe: advertises upload limits only. */
  async getUploadOptions(res: Response): Promise<void> {
    const { backup } = await this.getConfig({ withCache: true });
    res.status(204).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
  }

  /**
   * Nightly queue-all job: enumerates partial assets older than the configured
   * max age and queues one PartialAssetCleanup job per asset, in batches.
   */
  @OnJob({ name: JobName.PartialAssetCleanupQueueAll, queue: QueueName.BackgroundTask })
  async removeStaleUploads(): Promise<void> {
    const config = await this.getConfig({ withCache: false });
    const createdBefore = DateTime.now().minus({ hours: config.backup.upload.maxAgeHours }).toJSDate();
    let jobs: JobItem[] = [];
    const assets = this.assetJobRepository.streamForPartialAssetCleanupJob(createdBefore);
    for await (const asset of assets) {
      jobs.push({ name: JobName.PartialAssetCleanup, data: asset });
      if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
        await this.jobRepository.queueAll(jobs);
        jobs = [];
      }
    }
    // Flush the final (possibly empty) batch.
    await this.jobRepository.queueAll(jobs);
  }

  /**
   * Per-asset cleanup job: if the on-disk file actually matches the expected
   * size and checksum the upload is salvaged (marked complete); otherwise the
   * file and asset record are removed.
   */
  @OnJob({ name: JobName.PartialAssetCleanup, queue: QueueName.BackgroundTask })
  removeStaleUpload({ id }: JobOf<JobName.PartialAssetCleanup>): Promise<JobStatus> {
    return this.databaseRepository.withUuidLock(id, async () => {
      const asset = await this.assetJobRepository.getForPartialAssetCleanupJob(id);
      if (!asset) {
        return JobStatus.Skipped;
      }
      const { checksum, fileModifiedAt, path, size } = asset;
      try {
        const stat = await this.storageRepository.stat(path);
        if (size === stat.size && checksum === (await this.cryptoRepository.hashFile(path))) {
          await this.onComplete({ id, path, fileModifiedAt });
          return JobStatus.Success;
        }
      } catch (error: any) {
        // stat/hash failures (e.g. missing file) just mean the upload is unsalvageable.
        this.logger.debugFn(() => `Failed to check upload file ${path}: ${error.message}`);
      }
      await this.onCancel(id, path);
      return JobStatus.Success;
    });
  }

  /**
   * Creates the partial asset record for a new upload. On a checksum-constraint
   * violation the existing asset is returned with isDuplicate=true so the
   * caller can decide whether to resume or reject.
   *
   * @throws BadRequestException for unsupported file types or exceeded quota.
   * @throws InternalServerErrorException for any other persistence failure.
   */
  async onStart(
    auth: AuthDto,
    { assetData, checksum, uploadLength }: StartUploadDto,
  ): Promise<{ id: string; path: string; status: AssetStatus; isDuplicate: boolean }> {
    const assetId = this.cryptoRepository.randomUUID();
    const folder = StorageCore.getNestedFolder(StorageFolder.Upload, auth.user.id, assetId);
    const extension = extname(assetData.filename);
    const path = join(folder, `${assetId}${extension}`);
    const type = mimeTypes.assetType(path);
    if (type === AssetType.Other) {
      throw new BadRequestException(`${assetData.filename} is an unsupported file type`);
    }
    this.validateQuota(auth, uploadLength);
    try {
      await this.assetRepository.createWithMetadata(
        {
          id: assetId,
          ownerId: auth.user.id,
          libraryId: null,
          checksum,
          originalPath: path,
          deviceAssetId: assetData.deviceAssetId,
          deviceId: assetData.deviceId,
          fileCreatedAt: assetData.fileCreatedAt,
          fileModifiedAt: assetData.fileModifiedAt,
          localDateTime: assetData.fileCreatedAt,
          type,
          isFavorite: assetData.isFavorite,
          livePhotoVideoId: assetData.livePhotoVideoId,
          // Hidden until the upload completes and metadata extraction runs.
          visibility: AssetVisibility.Hidden,
          originalFileName: assetData.filename,
          status: AssetStatus.Partial,
        },
        uploadLength,
        assetData.iCloudId ? [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: assetData.iCloudId } }] : undefined,
      );
    } catch (error: any) {
      if (!isAssetChecksumConstraint(error)) {
        this.logger.error(`Error creating upload asset record: ${error.message}`);
        throw new InternalServerErrorException('Error creating asset');
      }
      // Unique-checksum violation: the asset already exists for this user.
      const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, checksum);
      if (!duplicate) {
        throw new InternalServerErrorException('Error locating duplicate for checksum constraint');
      }
      return { id: duplicate.id, path, status: duplicate.status, isDuplicate: true };
    }
    return { id: assetId, path, status: AssetStatus.Partial, isDuplicate: false };
  }

  /**
   * Finalizes a finished upload: flips the asset out of Partial, best-effort
   * restores the file's mtime, and queues metadata extraction. Each step is
   * retried to ride out transient failures.
   */
  async onComplete({ id, path, fileModifiedAt }: { id: string; path: string; fileModifiedAt: Date }) {
    this.logger.log('Completing upload for asset', id);
    const jobData = { name: JobName.AssetExtractMetadata, data: { id, source: 'upload' } } as const;
    await withRetry(() => this.assetRepository.setComplete(id));
    try {
      await withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
    } catch (error: any) {
      // mtime restoration is cosmetic; never fail the completion over it.
      this.logger.error(`Failed to update times for ${path}: ${error.message}`);
    }
    await withRetry(() => this.jobRepository.queue(jobData));
  }

  /** Removes the partial file and its asset record, refunding quota; retried. */
  async onCancel(assetId: string, path: string): Promise<void> {
    this.logger.log('Cancelling upload for asset', assetId);
    await withRetry(() => this.storageRepository.unlink(path));
    await withRetry(() => this.assetRepository.removeAndDecrementQuota(assetId));
  }

  /** Registers req as the active request for the asset, aborting any older one. */
  private addRequest(assetId: string, req: Readable) {
    const addTime = new Date();
    const activeRequest = { req, startTime: addTime };
    this.abortExistingRequest(assetId, addTime);
    this.activeRequests.set(assetId, activeRequest);
    req.on('close', () => {
      // Only clear the entry if it still refers to this request (a newer one
      // may have replaced it in the meantime).
      if (this.activeRequests.get(assetId)?.req === req) {
        this.activeRequests.delete(assetId);
      }
    });
  }

  /** Aborts a request for this asset on this worker, or broadcasts to the others. */
  private abortExistingRequest(assetId: string, abortTime = new Date()) {
    const abortEvent = { assetId, abortTime };
    // only emit if we didn't just abort it ourselves
    if (!this.onUploadAbort(abortEvent)) {
      this.eventRepository.serverSend('UploadAbort', abortEvent);
    }
  }

  /**
   * Manually pumps req into writeStream with backpressure handling, and emits
   * an error on the write stream if the request closes before `size` bytes
   * arrived (so awaiting the stream's 'close'/'error' rejects).
   */
  private pipe(req: Readable, writeStream: Writable, size: number) {
    let receivedLength = 0;
    req.on('data', (data: Buffer) => {
      receivedLength += data.length;
      if (!writeStream.write(data)) {
        req.pause();
        writeStream.once('drain', () => req.resume());
      }
    });
    req.on('close', () => {
      if (receivedLength < size) {
        writeStream.emit('error', new Error('Request closed before all data received'));
      }
      writeStream.end();
    });
  }

  /** Writes a raw 104 (Upload Resumption Supported) interim response. */
  private sendInterimResponse({ socket }: Response, location: string, interopVersion: number, limits: string): void {
    if (socket && !socket.destroyed) {
      // Express doesn't understand interim responses, so write directly to socket
      socket.write(
        'HTTP/1.1 104 Upload Resumption Supported\r\n' +
          `Location: ${location}\r\n` +
          `Upload-Limit: ${limits}\r\n` +
          `Upload-Draft-Interop-Version: ${interopVersion}\r\n\r\n`,
      );
    }
  }

  /** 400 problem+json: Upload-Length disagrees with a previously declared length. */
  private sendInconsistentLength(res: Response): void {
    res.status(400).contentType('application/problem+json').send({
      type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length',
      title: 'inconsistent length values for upload',
    });
  }

  /** 400 problem+json: the upload was already finalized. */
  private sendAlreadyCompleted(res: Response): void {
    res.status(400).contentType('application/problem+json').send({
      type: 'https://iana.org/assignments/http-problem-types#completed-upload',
      title: 'upload is already completed',
    });
  }

  /** 409 problem+json: client offset differs from bytes on disk; reports both. */
  private sendOffsetMismatch(res: Response, expected: number, actual: number): void {
    res.status(409).contentType('application/problem+json').setHeader('Upload-Offset', expected.toString()).send({
      type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
      title: 'offset from request does not match offset of resource',
      'expected-offset': expected,
      'provided-offset': actual,
    });
  }

  /**
   * Rejects the upload with a 460 (non-standard status used for checksum
   * mismatch) and deletes the partial asset.
   */
  private sendChecksumMismatch(res: Response, assetId: string, path: string) {
    this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`);
    res.status(460).send('File on server does not match provided checksum');
    return this.onCancel(assetId, path);
  }

  /** Throws when the declared upload size would push the user over quota. */
  private validateQuota(auth: AuthDto, size: number): void {
    const { quotaSizeInBytes: quotaLimit, quotaUsageInBytes: currentUsage } = auth.user;
    if (quotaLimit === null) {
      // null quota means unlimited storage.
      return;
    }
    if (quotaLimit < currentUsage + size) {
      throw new BadRequestException('Quota has been exceeded!');
    }
  }

  /** Current upload offset = size of the file on disk; 0 if it doesn't exist yet. */
  private async getCurrentOffset(path: string): Promise<number> {
    try {
      const stat = await this.storageRepository.stat(path);
      return stat.size;
    } catch (error: any) {
      if ((error as NodeJS.ErrnoException)?.code === 'ENOENT') {
        return 0;
      }
      throw error;
    }
  }

  /**
   * Sets the completeness header in the form the client's interop version
   * expects: Upload-Complete (?1/?0) for versions > 3 or unspecified,
   * Upload-Incomplete (inverted) for older drafts.
   */
  private setCompleteHeader(res: Response, interopVersion: number | undefined, isComplete: boolean): void {
    if (interopVersion === undefined || interopVersion > 3) {
      res.setHeader('Upload-Complete', isComplete ? '?1' : '?0');
    } else {
      res.setHeader('Upload-Incomplete', isComplete ? '?0' : '?1');
    }
  }

  /** Upload-Limit header value derived from the configured max upload age. */
  private getUploadLimits({ upload }: SystemConfig['backup']) {
    return `min-size=1, max-age=${upload.maxAgeHours * 3600}`;
  }
}

View File

@@ -3,7 +3,6 @@ import { AlbumService } from 'src/services/album.service';
import { ApiKeyService } from 'src/services/api-key.service';
import { ApiService } from 'src/services/api.service';
import { AssetMediaService } from 'src/services/asset-media.service';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { AssetService } from 'src/services/asset.service';
import { AuditService } from 'src/services/audit.service';
import { AuthAdminService } from 'src/services/auth-admin.service';
@@ -48,7 +47,6 @@ export const services = [
AlbumService,
ApiService,
AssetMediaService,
AssetUploadService,
AssetService,
AuditService,
AuthService,

View File

@@ -48,7 +48,6 @@ describe(JobService.name, () => {
{ name: JobName.UserSyncUsage },
{ name: JobName.AssetGenerateThumbnailsQueueAll, data: { force: false } },
{ name: JobName.FacialRecognitionQueueAll, data: { force: false, nightly: true } },
{ name: JobName.PartialAssetCleanupQueueAll },
]);
});
});

View File

@@ -302,10 +302,6 @@ export class JobService extends BaseService {
jobs.push({ name: JobName.FacialRecognitionQueueAll, data: { force: false, nightly: true } });
}
if (config.nightlyTasks.removeStaleUploads) {
jobs.push({ name: JobName.PartialAssetCleanupQueueAll });
}
await this.jobRepository.queueAll(jobs);
}

View File

@@ -46,9 +46,6 @@ const updatedConfig = Object.freeze<SystemConfig>({
cronExpression: '0 02 * * *',
keepLastAmount: 14,
},
upload: {
maxAgeHours: 72,
},
},
ffmpeg: {
crf: 30,
@@ -118,7 +115,6 @@ const updatedConfig = Object.freeze<SystemConfig>({
missingThumbnails: true,
generateMemories: true,
syncQuotaUsage: true,
removeStaleUploads: true,
},
reverseGeocoding: {
enabled: true,

View File

@@ -352,8 +352,6 @@ export type JobItem =
| { name: JobName.PersonCleanup; data?: IBaseJob }
| { name: JobName.AssetDelete; data: IAssetDeleteJob }
| { name: JobName.AssetDeleteCheck; data?: IBaseJob }
| { name: JobName.PartialAssetCleanup; data: IEntityJob }
| { name: JobName.PartialAssetCleanupQueueAll; data?: IBaseJob }
// Library Management
| { name: JobName.LibrarySyncFiles; data: ILibraryFileJob }

View File

@@ -99,7 +99,7 @@ export const getKyselyConfig = (
}),
}),
log(event) {
if (event.level === 'error' && (event.error as PostgresError).constraint_name !== ASSET_CHECKSUM_CONSTRAINT) {
if (event.level === 'error') {
console.error('Query failed :', {
durationMs: event.queryDurationMillis,
error: event.error,

View File

@@ -14,7 +14,6 @@ import {
import _ from 'lodash';
import { writeFileSync } from 'node:fs';
import path from 'node:path';
import { setTimeout } from 'node:timers/promises';
import picomatch from 'picomatch';
import parse from 'picomatch/lib/parse';
import { SystemConfig } from 'src/config';
@@ -325,18 +324,3 @@ export const globToSqlPattern = (glob: string) => {
export function clamp(value: number, min: number, max: number) {
return Math.max(min, Math.min(max, value));
}
/**
 * Runs `operation`, retrying on failure up to `retries` additional times.
 *
 * @param operation - async function to execute
 * @param retries - number of retry attempts after the initial failure (default 2)
 * @param delay - milliseconds to wait between attempts (default 100)
 * @returns the result of the first successful attempt
 * @throws the error from the final attempt when every attempt fails
 */
export async function withRetry<T>(operation: () => Promise<T>, retries: number = 2, delay: number = 100): Promise<T> {
  // `unknown` rather than `any`: rethrown as-is, never inspected here.
  let lastError: unknown;
  for (let attempt = 0; attempt <= retries; attempt++) {
    try {
      return await operation();
    } catch (error) {
      lastError = error;
    }
    // Only sleep when another attempt will actually follow.
    if (attempt < retries) {
      await setTimeout(delay);
    }
  }
  throw lastError;
}

View File

@@ -1,35 +1,5 @@
import { BadRequestException } from '@nestjs/common';
import { plainToInstance } from 'class-transformer';
import { validateSync } from 'class-validator';
/**
 * Decodes a checksum string into a Buffer. A 28-character value is decoded
 * as base64 (28 chars is the base64 length of a 20-byte digest — presumably
 * SHA-1; confirm against producers); any other length is decoded as hex.
 */
export const fromChecksum = (checksum: string): Buffer => {
  const encoding = checksum.length === 28 ? 'base64' : 'hex';
  return Buffer.from(checksum, encoding);
};
/** Unwraps a possibly-array parameter, taking the first element of an array and passing scalars through. */
export const fromMaybeArray = <T>(value: T | T[]) => {
  if (Array.isArray(value)) {
    return value[0];
  }
  return value;
};
export function validateSyncOrReject<T extends object>(cls: new () => T, obj: any): T {
const dto = plainToInstance(cls, obj, { excludeExtraneousValues: true });
const errors = validateSync(dto);
if (errors.length === 0) {
return dto;
}
const constraints = [];
for (const error of errors) {
if (error.constraints) {
constraints.push(...Object.values(error.constraints));
}
if (!error.children) {
continue;
}
for (const child of error.children) {
if (child.constraints) {
constraints.push(...Object.values(child.constraints));
}
}
}
throw new BadRequestException(constraints);
}

View File

@@ -45,9 +45,5 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
upsertMetadata: vitest.fn(),
getMetadataByKey: vitest.fn(),
deleteMetadataByKey: vitest.fn(),
getCompletionMetadata: vitest.fn(),
createWithMetadata: vitest.fn(),
removeAndDecrementQuota: vitest.fn(),
setComplete: vitest.fn(),
};
};

View File

@@ -20,7 +20,6 @@ export const newDatabaseRepositoryMock = (): Mocked<RepositoryInterface<Database
prewarm: vitest.fn(),
runMigrations: vitest.fn(),
withLock: vitest.fn().mockImplementation((_, function_: <R>() => Promise<R>) => function_()),
withUuidLock: vitest.fn().mockImplementation((_, function_: <R>() => Promise<R>) => function_()),
tryLock: vitest.fn(),
isBusy: vitest.fn(),
wait: vitest.fn(),

View File

@@ -51,7 +51,6 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
readFile: vitest.fn(),
createFile: vitest.fn(),
createWriteStream: vitest.fn(),
createOrAppendWriteStream: vitest.fn(),
createOrOverwriteFile: vitest.fn(),
existsSync: vitest.fn(),
overwriteFile: vitest.fn(),
@@ -59,7 +58,6 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
unlinkDir: vitest.fn().mockResolvedValue(true),
removeEmptyDirs: vitest.fn(),
checkFileExists: vitest.fn(),
mkdir: vitest.fn(),
mkdirSync: vitest.fn(),
checkDiskUsage: vitest.fn(),
readdir: vitest.fn(),

View File

@@ -44,7 +44,7 @@
"geo-coordinates-parser": "^1.7.4",
"geojson": "^0.5.0",
"handlebars": "^4.7.8",
"happy-dom": "^18.0.1",
"happy-dom": "^20.0.0",
"intl-messageformat": "^10.7.11",
"justified-layout": "^4.1.0",
"lodash-es": "^4.17.21",
@@ -89,7 +89,7 @@
"eslint-plugin-unicorn": "^61.0.2",
"factory.ts": "^1.4.1",
"globals": "^16.0.0",
"happy-dom": "^18.0.1",
"happy-dom": "^20.0.0",
"prettier": "^3.4.2",
"prettier-plugin-organize-imports": "^4.0.0",
"prettier-plugin-sort-json": "^4.1.1",

View File

@@ -89,10 +89,10 @@
let { isViewing: showAssetViewer, asset: viewingAsset, gridScrollTarget } = assetViewingStore;
let element: HTMLElement | undefined = $state();
let scrollableElement: HTMLElement | undefined = $state();
let timelineElement: HTMLElement | undefined = $state();
let showSkeleton = $state(true);
let invisible = $state(true);
// The percentage of scroll through the month that is currently intersecting the top boundary of the viewport.
// Note: There may be multiple months visible within the viewport at any given time.
let viewportTopMonthScrollPercent = $state(0);
@@ -124,43 +124,22 @@
timelineManager.setLayoutOptions(layoutOptions);
});
const scrollTo = (top: number) => {
if (element) {
element.scrollTo({ top });
}
};
const scrollTop = (top: number) => {
if (element) {
element.scrollTop = top;
}
};
const scrollBy = (y: number) => {
if (element) {
element.scrollBy(0, y);
}
};
$effect(() => {
timelineManager.scrollableElement = scrollableElement;
});
const scrollToTop = () => {
scrollTo(0);
timelineManager.scrollTo(0);
};
const getAssetHeight = (assetId: string, monthGroup: MonthGroup) => {
// the following method may trigger any layouts, so need to
// handle any scroll compensation that may have been set
const height = monthGroup!.findAssetAbsolutePosition(assetId);
while (timelineManager.scrollCompensation.monthGroup) {
handleScrollCompensation(timelineManager.scrollCompensation);
timelineManager.clearScrollCompensation();
}
return height;
};
const getAssetHeight = (assetId: string, monthGroup: MonthGroup) => monthGroup.findAssetAbsolutePosition(assetId);
const assetIsVisible = (assetTop: number): boolean => {
if (!element) {
if (!scrollableElement) {
return false;
}
const { clientHeight, scrollTop } = element;
const { clientHeight, scrollTop } = scrollableElement;
return assetTop >= scrollTop && assetTop < scrollTop + clientHeight;
};
@@ -177,8 +156,7 @@
return true;
}
scrollTo(height);
updateSlidingWindow();
timelineManager.scrollTo(height);
return true;
};
@@ -188,8 +166,7 @@
return false;
}
const height = getAssetHeight(asset.id, monthGroup);
scrollTo(height);
updateSlidingWindow();
timelineManager.scrollTo(height);
return true;
};
@@ -203,7 +180,7 @@
// if the asset is not found, scroll to the top
scrollToTop();
}
showSkeleton = false;
invisible = false;
};
beforeNavigate(() => (timelineManager.suspendTransitions = true));
@@ -230,7 +207,7 @@
} else {
scrollToTop();
}
showSkeleton = false;
invisible = false;
}, 500);
}
};
@@ -244,26 +221,12 @@
const updateIsScrolling = () => (timelineManager.scrolling = true);
// note: don't throttle, debounce, or otherwise do this function async - it causes flicker
const updateSlidingWindow = () => timelineManager.updateSlidingWindow(element?.scrollTop || 0);
const handleScrollCompensation = ({ heightDelta, scrollTop }: { heightDelta?: number; scrollTop?: number }) => {
if (heightDelta !== undefined) {
scrollBy(heightDelta);
} else if (scrollTop !== undefined) {
scrollTo(scrollTop);
}
// Yes, updateSlideWindow() is called by the onScroll event triggered as a result of
// the above calls. However, this delay is enough time to set the intersecting property
// of the monthGroup to false, then true, which causes the DOM nodes to be recreated,
// causing bad perf, and also, disrupting focus of those elements.
updateSlidingWindow();
};
const topSectionResizeObserver: OnResizeCallback = ({ height }) => (timelineManager.topSectionHeight = height);
onMount(() => {
if (!enableRouting) {
showSkeleton = false;
invisible = false;
}
});
@@ -273,11 +236,13 @@
};
const getMaxScroll = () => {
if (!element || !timelineElement) {
if (!scrollableElement || !timelineElement) {
return 0;
}
return (
timelineManager.topSectionHeight + bottomSectionHeight + (timelineElement.clientHeight - element.clientHeight)
timelineManager.topSectionHeight +
bottomSectionHeight +
(timelineElement.clientHeight - scrollableElement.clientHeight)
);
};
@@ -287,7 +252,7 @@
const delta = monthGroup.height * monthGroupScrollPercent;
const scrollToTop = (topOffset + delta) * maxScrollPercent;
scrollTop(scrollToTop);
timelineManager.scrollTo(scrollToTop);
};
// note: don't throttle, debounce, or otherwise make this function async - it causes flicker
@@ -299,7 +264,7 @@
// edge case - scroll limited due to size of content, must adjust - use the overall percent instead
const maxScroll = getMaxScroll();
const offset = maxScroll * overallScrollPercent;
scrollTop(offset);
timelineManager.scrollTo(offset);
} else {
const monthGroup = timelineManager.months.find(
({ yearMonth: { year, month } }) => year === scrubberMonth.year && month === scrubberMonth.month,
@@ -315,26 +280,26 @@
const handleTimelineScroll = () => {
isInLeadOutSection = false;
if (!element) {
if (!scrollableElement) {
return;
}
if (timelineManager.timelineHeight < timelineManager.viewportHeight * 2) {
// edge case - scroll limited due to size of content, must adjust - use the overall percent instead
const maxScroll = getMaxScroll();
timelineScrollPercent = Math.min(1, element.scrollTop / maxScroll);
timelineScrollPercent = Math.min(1, scrollableElement.scrollTop / maxScroll);
viewportTopMonth = undefined;
viewportTopMonthScrollPercent = 0;
} else {
let top = element.scrollTop;
let top = scrollableElement.scrollTop;
if (top < timelineManager.topSectionHeight) {
// in the lead-in area
viewportTopMonth = undefined;
viewportTopMonthScrollPercent = 0;
const maxScroll = getMaxScroll();
timelineScrollPercent = Math.min(1, element.scrollTop / maxScroll);
timelineScrollPercent = Math.min(1, scrollableElement.scrollTop / maxScroll);
return;
}
@@ -441,7 +406,7 @@
onSelect(asset);
if (singleSelect) {
scrollTop(0);
timelineManager.scrollTo(0);
return;
}
@@ -591,10 +556,10 @@
if (evt.key === 'ArrowUp') {
amount = -amount;
if (shiftKeyIsDown) {
element?.scrollBy({ top: amount, behavior: 'smooth' });
scrollableElement?.scrollBy({ top: amount, behavior: 'smooth' });
}
} else if (evt.key === 'ArrowDown') {
element?.scrollBy({ top: amount, behavior: 'smooth' });
scrollableElement?.scrollBy({ top: amount, behavior: 'smooth' });
}
}}
/>
@@ -607,19 +572,19 @@
style:margin-right={(usingMobileDevice ? 0 : scrubberWidth) + 'px'}
tabindex="-1"
bind:clientHeight={timelineManager.viewportHeight}
bind:clientWidth={null, (v: number) => ((timelineManager.viewportWidth = v), updateSlidingWindow())}
bind:this={element}
onscroll={() => (handleTimelineScroll(), updateSlidingWindow(), updateIsScrolling())}
bind:clientWidth={timelineManager.viewportWidth}
bind:this={scrollableElement}
onscroll={() => (handleTimelineScroll(), timelineManager.updateSlidingWindow(), updateIsScrolling())}
>
<section
bind:this={timelineElement}
id="virtual-timeline"
class:invisible={showSkeleton}
class:invisible
style:height={timelineManager.timelineHeight + 'px'}
>
<section
use:resizeObserver={topSectionResizeObserver}
class:invisible={showSkeleton}
class:invisible
style:position="absolute"
style:left="0"
style:right="0"
@@ -642,10 +607,7 @@
style:transform={`translate3d(0,${absoluteHeight}px,0)`}
style:width="100%"
>
<Skeleton
height={monthGroup.height - monthGroup.timelineManager.headerHeight}
title={monthGroup.monthGroupTitle}
/>
<Skeleton {invisible} height={monthGroup.height} title={monthGroup.monthGroupTitle} />
</div>
{:else if display}
<div
@@ -666,7 +628,6 @@
onSelect={({ title, assets }) => handleGroupSelect(timelineManager, title, assets)}
onSelectAssetCandidates={handleSelectAssetCandidates}
onSelectAssets={handleSelectAssets}
onScrollCompensation={handleScrollCompensation}
{customLayout}
{onThumbnailClick}
/>
@@ -686,7 +647,7 @@
<Portal target="body">
{#if $showAssetViewer}
<TimelineAssetViewer bind:showSkeleton {timelineManager} {removeAction} {withStacked} {isShared} {album} {person} />
<TimelineAssetViewer bind:invisible {timelineManager} {removeAction} {withStacked} {isShared} {album} {person} />
{/if}
</Portal>

View File

@@ -13,7 +13,7 @@
interface Props {
timelineManager: TimelineManager;
showSkeleton: boolean;
invisible: boolean;
withStacked?: boolean;
isShared?: boolean;
album?: AlbumResponseDto | null;
@@ -30,7 +30,7 @@
let {
timelineManager,
showSkeleton = $bindable(false),
invisible = $bindable(false),
removeAction,
withStacked = false,
isShared = false,
@@ -81,7 +81,7 @@
const handleClose = async (asset: { id: string }) => {
assetViewingStore.showAssetViewer(false);
showSkeleton = true;
invisible = true;
$gridScrollTarget = { at: asset.id };
await navigate({ targetRoute: 'current', assetId: null, assetGridRouteSearchParams: $gridScrollTarget });
};

View File

@@ -14,7 +14,7 @@
import { fromTimelinePlainDate, getDateLocaleString } from '$lib/utils/timeline-util';
import { Icon } from '@immich/ui';
import type { Snippet } from 'svelte';
import { type Snippet } from 'svelte';
import { flip } from 'svelte/animate';
import { scale } from 'svelte/transition';
@@ -33,7 +33,6 @@
onSelect: ({ title, assets }: { title: string; assets: TimelineAsset[] }) => void;
onSelectAssets: (asset: TimelineAsset) => void;
onSelectAssetCandidates: (asset: TimelineAsset | null) => void;
onScrollCompensation: (compensation: { heightDelta?: number; scrollTop?: number }) => void;
onThumbnailClick?: (
asset: TimelineAsset,
timelineManager: TimelineManager,
@@ -59,7 +58,7 @@
onSelect,
onSelectAssets,
onSelectAssetCandidates,
onScrollCompensation,
onThumbnailClick,
}: Props = $props();
@@ -134,13 +133,6 @@
});
return getDateLocaleString(date);
};
$effect.root(() => {
if (timelineManager.scrollCompensation.monthGroup === monthGroup) {
onScrollCompensation(timelineManager.scrollCompensation);
timelineManager.clearScrollCompensation();
}
});
</script>
{#each filterIntersecting(monthGroup.dayGroups) as dayGroup, groupIndex (dayGroup.day)}

View File

@@ -2,24 +2,21 @@
interface Props {
height: number;
title?: string;
invisible?: boolean;
}
let { height = 0, title }: Props = $props();
let { height = 0, title, invisible = false }: Props = $props();
</script>
<div class="overflow-clip" style:height={height + 'px'}>
<div class={['overflow-clip', { invisible }]} style:height={height + 'px'}>
{#if title}
<div
class="flex pt-7 pb-5 h-6 place-items-center text-xs font-medium text-immich-fg bg-light dark:text-immich-dark-fg md:text-sm"
class="flex pt-7 pb-5 max-md:pt-5 max-md:pb-3 h-6 place-items-center text-xs font-medium text-immich-fg dark:text-immich-dark-fg md:text-sm"
>
{title}
</div>
{/if}
<div
class="animate-pulse absolute h-full ms-[10px] me-[10px]"
style:width="calc(100% - 20px)"
data-skeleton="true"
></div>
<div class="animate-pulse h-full w-full" data-skeleton="true"></div>
</div>
<style>
@@ -47,4 +44,7 @@
0s linear 0.1s forwards delayedVisibility,
pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
}
.invisible [data-skeleton] {
visibility: hidden !important;
}
</style>

View File

@@ -55,7 +55,7 @@ export function calculateMonthGroupIntersecting(
}
/**
* Calculate intersection for viewer assets with additional parameters like header height and scroll compensation
* Calculate intersection for viewer assets with additional parameters like header height
*/
export function calculateViewerAssetIntersecting(
timelineManager: TimelineManager,
@@ -64,12 +64,8 @@ export function calculateViewerAssetIntersecting(
expandTop: number = INTERSECTION_EXPAND_TOP,
expandBottom: number = INTERSECTION_EXPAND_BOTTOM,
) {
const scrollCompensationHeightDelta = timelineManager.scrollCompensation?.heightDelta ?? 0;
const topWindow =
timelineManager.visibleWindow.top - timelineManager.headerHeight - expandTop + scrollCompensationHeightDelta;
const bottomWindow =
timelineManager.visibleWindow.bottom + timelineManager.headerHeight + expandBottom + scrollCompensationHeightDelta;
const topWindow = timelineManager.visibleWindow.top - timelineManager.headerHeight - expandTop;
const bottomWindow = timelineManager.visibleWindow.bottom + timelineManager.headerHeight + expandBottom;
const positionBottom = positionTop + positionHeight;

View File

@@ -4,7 +4,6 @@ import { getTimeBucket } from '@immich/sdk';
import type { MonthGroup } from '../month-group.svelte';
import type { TimelineManager } from '../timeline-manager.svelte';
import type { TimelineManagerOptions } from '../types';
import { layoutMonthGroup } from './layout-support.svelte';
export async function loadFromTimeBuckets(
timelineManager: TimelineManager,
@@ -55,6 +54,4 @@ export async function loadFromTimeBuckets(
)}`,
);
}
layoutMonthGroup(timelineManager, monthGroup);
}

View File

@@ -242,42 +242,40 @@ export class MonthGroup {
if (this.#height === height) {
return;
}
const { timelineManager: store, percent } = this;
const index = store.months.indexOf(this);
const timelineManager = this.timelineManager;
const index = timelineManager.months.indexOf(this);
const heightDelta = height - this.#height;
this.#height = height;
const prevMonthGroup = store.months[index - 1];
const prevMonthGroup = timelineManager.months[index - 1];
if (prevMonthGroup) {
const newTop = prevMonthGroup.#top + prevMonthGroup.#height;
if (this.#top !== newTop) {
this.#top = newTop;
}
}
for (let cursor = index + 1; cursor < store.months.length; cursor++) {
if (heightDelta === 0) {
return;
}
for (let cursor = index + 1; cursor < timelineManager.months.length; cursor++) {
const monthGroup = this.timelineManager.months[cursor];
const newTop = monthGroup.#top + heightDelta;
if (monthGroup.#top !== newTop) {
monthGroup.#top = newTop;
}
}
if (store.topIntersectingMonthGroup) {
const currentIndex = store.months.indexOf(store.topIntersectingMonthGroup);
if (currentIndex > 0) {
if (index < currentIndex) {
store.scrollCompensation = {
heightDelta,
scrollTop: undefined,
monthGroup: this,
};
} else if (percent > 0) {
const top = this.top + height * percent;
store.scrollCompensation = {
heightDelta: undefined,
scrollTop: top,
monthGroup: this,
};
}
}
if (!timelineManager.viewportTopMonthIntersection) {
return;
}
const { month, monthBottomViewportRatio, viewportTopRatioInMonth } = timelineManager.viewportTopMonthIntersection;
const currentIndex = month ? timelineManager.months.indexOf(month) : -1;
if (!month || currentIndex <= 0 || index > currentIndex) {
return;
}
if (index < currentIndex || monthBottomViewportRatio < 1) {
timelineManager.scrollBy(heightDelta);
} else if (index === currentIndex) {
const scrollTo = this.top + height * viewportTopRatioInMonth;
timelineManager.scrollTo(scrollTo);
}
}

View File

@@ -4,6 +4,7 @@ import { AbortError } from '$lib/utils';
import { fromISODateTimeUTCToObject } from '$lib/utils/timeline-util';
import { type AssetResponseDto, type TimeBucketAssetResponseDto } from '@immich/sdk';
import { timelineAssetFactory, toResponseDto } from '@test-data/factories/asset-factory';
import { tick } from 'svelte';
import { TimelineManager } from './timeline-manager.svelte';
import type { TimelineAsset } from './types';
@@ -64,6 +65,7 @@ describe('TimelineManager', () => {
sdkMock.getTimeBucket.mockImplementation(({ timeBucket }) => Promise.resolve(bucketAssetsResponse[timeBucket]));
await timelineManager.updateViewport({ width: 1588, height: 1000 });
await tick();
});
it('should load months in viewport', () => {

View File

@@ -37,6 +37,13 @@ import type {
Viewport,
} from './types';
type ViewportTopMonthIntersection = {
month: MonthGroup | undefined;
// Where viewport top intersects month (0 = month top, 1 = month bottom)
viewportTopRatioInMonth: number;
// Where month bottom is in viewport (0 = viewport top, 1 = viewport bottom)
monthBottomViewportRatio: number;
};
export class TimelineManager {
isInitialized = $state(false);
months: MonthGroup[] = $state([]);
@@ -49,7 +56,7 @@ export class TimelineManager {
scrubberMonths: ScrubberMonth[] = $state([]);
scrubberTimelineHeight: number = $state(0);
topIntersectingMonthGroup: MonthGroup | undefined = $state();
viewportTopMonthIntersection: ViewportTopMonthIntersection | undefined;
visibleWindow = $derived.by(() => ({
top: this.#scrollTop,
@@ -87,15 +94,8 @@ export class TimelineManager {
#suspendTransitions = $state(false);
#resetScrolling = debounce(() => (this.#scrolling = false), 1000);
#resetSuspendTransitions = debounce(() => (this.suspendTransitions = false), 1000);
scrollCompensation: {
heightDelta: number | undefined;
scrollTop: number | undefined;
monthGroup: MonthGroup | undefined;
} = $state({
heightDelta: 0,
scrollTop: 0,
monthGroup: undefined,
});
#updatingIntersections = false;
#scrollableElement: HTMLElement | undefined = $state();
constructor() {}
@@ -109,6 +109,20 @@ export class TimelineManager {
}
}
set scrollableElement(element: HTMLElement | undefined) {
this.#scrollableElement = element;
}
scrollTo(top: number) {
this.#scrollableElement?.scrollTo({ top });
this.updateSlidingWindow();
}
scrollBy(y: number) {
this.#scrollableElement?.scrollBy(0, y);
this.updateSlidingWindow();
}
#setHeaderHeight(value: number) {
if (this.#headerHeight == value) {
return false;
@@ -172,7 +186,8 @@ export class TimelineManager {
const changed = value !== this.#viewportWidth;
this.#viewportWidth = value;
this.suspendTransitions = true;
void this.#updateViewportGeometry(changed);
this.#updateViewportGeometry(changed);
this.updateSlidingWindow();
}
get viewportWidth() {
@@ -234,46 +249,52 @@ export class TimelineManager {
this.#websocketSupport = undefined;
}
updateSlidingWindow(scrollTop: number) {
updateSlidingWindow() {
const scrollTop = this.#scrollableElement?.scrollTop ?? 0;
if (this.#scrollTop !== scrollTop) {
this.#scrollTop = scrollTop;
this.updateIntersections();
}
}
clearScrollCompensation() {
this.scrollCompensation = {
heightDelta: undefined,
scrollTop: undefined,
monthGroup: undefined,
};
#calculateMonthBottomViewportRatio(month: MonthGroup | undefined) {
if (!month) {
return 0;
}
const windowHeight = this.visibleWindow.bottom - this.visibleWindow.top;
const bottomOfMonth = month.top + month.height;
const bottomOfMonthInViewport = bottomOfMonth - this.visibleWindow.top;
return clamp(bottomOfMonthInViewport / windowHeight, 0, 1);
}
#calculateVewportTopRatioInMonth(month: MonthGroup | undefined) {
if (!month) {
return 0;
}
return clamp((this.visibleWindow.top - month.top) / month.height, 0, 1);
}
updateIntersections() {
if (!this.isInitialized || this.visibleWindow.bottom === this.visibleWindow.top) {
if (this.#updatingIntersections || !this.isInitialized || this.visibleWindow.bottom === this.visibleWindow.top) {
return;
}
let topIntersectingMonthGroup = undefined;
this.#updatingIntersections = true;
for (const month of this.months) {
updateIntersectionMonthGroup(this, month);
if (!topIntersectingMonthGroup && month.actuallyIntersecting) {
topIntersectingMonthGroup = month;
}
}
if (topIntersectingMonthGroup !== undefined && this.topIntersectingMonthGroup !== topIntersectingMonthGroup) {
this.topIntersectingMonthGroup = topIntersectingMonthGroup;
}
for (const month of this.months) {
if (month === this.topIntersectingMonthGroup) {
this.topIntersectingMonthGroup.percent = clamp(
(this.visibleWindow.top - this.topIntersectingMonthGroup.top) / this.topIntersectingMonthGroup.height,
0,
1,
);
} else {
month.percent = 0;
}
}
const month = this.months.find((month) => month.actuallyIntersecting);
const viewportTopRatioInMonth = this.#calculateVewportTopRatioInMonth(month);
const monthBottomViewportRatio = this.#calculateMonthBottomViewportRatio(month);
this.viewportTopMonthIntersection = {
month,
monthBottomViewportRatio,
viewportTopRatioInMonth,
};
this.#updatingIntersections = false;
}
clearDeferredLayout(month: MonthGroup) {
@@ -401,11 +422,12 @@ export class TimelineManager {
return;
}
const result = await monthGroup.loader?.execute(async (signal: AbortSignal) => {
const executionStatus = await monthGroup.loader?.execute(async (signal: AbortSignal) => {
await loadFromTimeBuckets(this, monthGroup, this.#options, signal);
}, cancelable);
if (result === 'LOADED') {
updateIntersectionMonthGroup(this, monthGroup);
if (executionStatus === 'LOADED') {
updateGeometry(this, monthGroup, { invalidateHeight: false });
this.updateIntersections();
}
}