Compare commits

9 Commits

Author SHA1 Message Date
mertalev 7cd5b7f64d remove timer 2025-09-19 10:51:58 -04:00
mertalev fb798a3492 add unit comment 2025-09-19 10:51:58 -04:00
mertalev 73d650de23 simplify 2025-09-19 10:51:58 -04:00
mertalev a07f3c2ba4 make resolver configurable 2025-09-19 10:51:58 -04:00
mertalev 9ccd98d871 tweaks 2025-09-19 10:51:58 -04:00
mertalev 188dcbf7d0 check cancellation before queueing 2025-09-19 10:51:58 -04:00
mertalev 51d106d192 refactor (handle missing asset) 2025-09-19 10:51:58 -04:00
mertalev 3e427e42cb dynamically batch asset requests 2025-09-19 10:51:58 -04:00
mertalev f6a99602e9 separate asset and thumbnail concurrency 2025-09-19 10:51:58 -04:00
54 changed files with 1953 additions and 1949 deletions

View File

@@ -169,6 +169,8 @@ Redis (Sentinel) URL example JSON before encoding:
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PING_TIMEOUT` | How long (ms) to wait for a PING response when checking if an ML server is available | `2000` | server |
| `MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME` | How long (ms) to ignore ML servers that are offline before trying again | `30000` | server |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads the RKNN runtime should spin up during inference. | `1` | machine learning |
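
A minimal sketch, assuming Node's `process.env`, of how the two new server-side values are resolved at boot; it mirrors the constants this same diff adds to the server's constants file, and the variable names are exactly the ones documented above:

// Read once at startup; both values are in milliseconds.
const MACHINE_LEARNING_PING_TIMEOUT = Number(process.env.MACHINE_LEARNING_PING_TIMEOUT || 2000);
const MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME = Number(
  process.env.MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME || 30_000,
);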

View File

@@ -123,13 +123,6 @@
"logging_enable_description": "Enable logging",
"logging_level_description": "When enabled, what log level to use.",
"logging_settings": "Logging",
"machine_learning_availability_checks": "Availability checks",
"machine_learning_availability_checks_description": "Automatically detect and prefer available machine learning servers",
"machine_learning_availability_checks_enabled": "Enable availability checks",
"machine_learning_availability_checks_interval": "Check interval",
"machine_learning_availability_checks_interval_description": "Interval in milliseconds between availability checks",
"machine_learning_availability_checks_timeout": "Request timeout",
"machine_learning_availability_checks_timeout_description": "Timeout in milliseconds for availability checks",
"machine_learning_clip_model": "CLIP model",
"machine_learning_clip_model_description": "The name of a CLIP model listed <link>here</link>. Note that you must re-run the 'Smart Search' job for all images upon changing a model.",
"machine_learning_duplicate_detection": "Duplicate Detection",
@@ -920,7 +913,6 @@
"cant_get_number_of_comments": "Can't get number of comments",
"cant_search_people": "Can't search people",
"cant_search_places": "Can't search places",
"clipboard_unsupported_mime_type": "The system clipboard does not support copying this type of content: {mimeType}",
"error_adding_assets_to_album": "Error adding assets to album",
"error_adding_users_to_album": "Error adding users to album",
"error_deleting_shared_user": "Error deleting shared user",
@@ -1924,7 +1916,6 @@
"stacktrace": "Stacktrace",
"start": "Start",
"start_date": "Start date",
"start_date_before_end_date": "Start date must be before end date",
"state": "State",
"status": "Status",
"stop_casting": "Stop casting",

View File

@@ -1,7 +1,7 @@
[tools]
node = "22.19.0"
flutter = "3.35.4"
pnpm = "10.15.1"
pnpm = "10.14.0"
dart = "3.8.2"
[tools."github:CQLabs/homebrew-dcm"]

View File

@@ -29,9 +29,11 @@
FAC6F89B2D287C890078CB2F /* ShareExtension.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = FAC6F8902D287C890078CB2F /* ShareExtension.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
FAC6F8B72D287F120078CB2F /* ShareViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = FAC6F8B52D287F120078CB2F /* ShareViewController.swift */; };
FAC6F8B92D287F120078CB2F /* MainInterface.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = FAC6F8B32D287F120078CB2F /* MainInterface.storyboard */; };
FEAFA8732E4D42F4001E47FE /* Thumbhash.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEAFA8722E4D42F4001E47FE /* Thumbhash.swift */; };
FED3B1962E253E9B0030FD97 /* ThumbnailsImpl.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1942E253E9B0030FD97 /* ThumbnailsImpl.swift */; };
FED3B1972E253E9B0030FD97 /* Thumbnails.g.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1932E253E9B0030FD97 /* Thumbnails.g.swift */; };
FEC340D12E7326630050078A /* AssetResolver.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEC340C92E7326630050078A /* AssetResolver.swift */; };
FEC340D22E7326630050078A /* Thumbhash.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEC340CB2E7326630050078A /* Thumbhash.swift */; };
FEC340D32E7326630050078A /* Request.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEC340CF2E7326630050078A /* Request.swift */; };
FEC340D42E7326630050078A /* ThumbnailResolver.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEC340CC2E7326630050078A /* ThumbnailResolver.swift */; };
FEC340D52E7326630050078A /* Thumbnails.g.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEC340CD2E7326630050078A /* Thumbnails.g.swift */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@@ -115,9 +117,11 @@
FAC6F8B42D287F120078CB2F /* ShareExtension.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = ShareExtension.entitlements; sourceTree = "<group>"; };
FAC6F8B52D287F120078CB2F /* ShareViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShareViewController.swift; sourceTree = "<group>"; };
FAC7416727DB9F5500C668D8 /* RunnerProfile.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = RunnerProfile.entitlements; sourceTree = "<group>"; };
FEAFA8722E4D42F4001E47FE /* Thumbhash.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Thumbhash.swift; sourceTree = "<group>"; };
FED3B1932E253E9B0030FD97 /* Thumbnails.g.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Thumbnails.g.swift; sourceTree = "<group>"; };
FED3B1942E253E9B0030FD97 /* ThumbnailsImpl.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ThumbnailsImpl.swift; sourceTree = "<group>"; };
FEC340C92E7326630050078A /* AssetResolver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AssetResolver.swift; sourceTree = "<group>"; };
FEC340CB2E7326630050078A /* Thumbhash.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Thumbhash.swift; sourceTree = "<group>"; };
FEC340CC2E7326630050078A /* ThumbnailResolver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ThumbnailResolver.swift; sourceTree = "<group>"; };
FEC340CD2E7326630050078A /* Thumbnails.g.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Thumbnails.g.swift; sourceTree = "<group>"; };
FEC340CF2E7326630050078A /* Request.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Request.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */
@@ -247,6 +251,7 @@
97C146F01CF9000F007C117D /* Runner */ = {
isa = PBXGroup;
children = (
FEC340D02E7326630050078A /* Resolvers */,
B25D37792E72CA15008B6CA7 /* Connectivity */,
B21E34A62E5AF9760031FDB9 /* Background */,
B2CF7F8C2DDE4EBB00744BF6 /* Sync */,
@@ -261,7 +266,6 @@
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
74858FAE1ED2DC5600515810 /* AppDelegate.swift */,
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */,
FED3B1952E253E9B0030FD97 /* Images */,
);
path = Runner;
sourceTree = "<group>";
@@ -296,16 +300,34 @@
path = ShareExtension;
sourceTree = "<group>";
};
FED3B1952E253E9B0030FD97 /* Images */ = {
FEC340CA2E7326630050078A /* Assets */ = {
isa = PBXGroup;
children = (
FEAFA8722E4D42F4001E47FE /* Thumbhash.swift */,
FED3B1932E253E9B0030FD97 /* Thumbnails.g.swift */,
FED3B1942E253E9B0030FD97 /* ThumbnailsImpl.swift */,
FEC340C92E7326630050078A /* AssetResolver.swift */,
);
path = Assets;
sourceTree = "<group>";
};
FEC340CE2E7326630050078A /* Images */ = {
isa = PBXGroup;
children = (
FEC340CB2E7326630050078A /* Thumbhash.swift */,
FEC340CC2E7326630050078A /* ThumbnailResolver.swift */,
FEC340CD2E7326630050078A /* Thumbnails.g.swift */,
);
path = Images;
sourceTree = "<group>";
};
FEC340D02E7326630050078A /* Resolvers */ = {
isa = PBXGroup;
children = (
FEC340CA2E7326630050078A /* Assets */,
FEC340CE2E7326630050078A /* Images */,
FEC340CF2E7326630050078A /* Request.swift */,
);
path = Resolvers;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
@@ -573,14 +595,16 @@
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */,
B21E34AC2E5B09190031FDB9 /* BackgroundWorker.swift in Sources */,
B25D377A2E72CA15008B6CA7 /* Connectivity.g.swift in Sources */,
FEAFA8732E4D42F4001E47FE /* Thumbhash.swift in Sources */,
B25D377C2E72CA26008B6CA7 /* ConnectivityApiImpl.swift in Sources */,
FED3B1962E253E9B0030FD97 /* ThumbnailsImpl.swift in Sources */,
B21E34AA2E5AFD2B0031FDB9 /* BackgroundWorkerApiImpl.swift in Sources */,
FED3B1972E253E9B0030FD97 /* Thumbnails.g.swift in Sources */,
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
B2BE315F2E5E5229006EEF88 /* BackgroundWorker.g.swift in Sources */,
65F32F33299D349D00CE9261 /* BackgroundSyncWorker.swift in Sources */,
FEC340D12E7326630050078A /* AssetResolver.swift in Sources */,
FEC340D22E7326630050078A /* Thumbhash.swift in Sources */,
FEC340D32E7326630050078A /* Request.swift in Sources */,
FEC340D42E7326630050078A /* ThumbnailResolver.swift in Sources */,
FEC340D52E7326630050078A /* Thumbnails.g.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};

View File

@@ -15,7 +15,7 @@ import UIKit
) -> Bool {
// Required for flutter_local_notification
if #available(iOS 10.0, *) {
UNUserNotificationCenter.current().delegate = self as? UNUserNotificationCenterDelegate
UNUserNotificationCenter.current().delegate = self as UNUserNotificationCenterDelegate
}
GeneratedPluginRegistrant.register(with: self)
@@ -53,7 +53,7 @@ import UIKit
public static func registerPlugins(binaryMessenger: FlutterBinaryMessenger) {
NativeSyncApiSetup.setUp(binaryMessenger: binaryMessenger, api: NativeSyncApiImpl())
ThumbnailApiSetup.setUp(binaryMessenger: binaryMessenger, api: ThumbnailApiImpl())
ThumbnailApiSetup.setUp(binaryMessenger: binaryMessenger, api: ThumbnailResolver())
BackgroundWorkerFgHostApiSetup.setUp(binaryMessenger: binaryMessenger, api: BackgroundWorkerApiImpl())
}
}

View File

@@ -1,211 +0,0 @@
import CryptoKit
import Flutter
import MobileCoreServices
import Photos
class Request {
weak var workItem: DispatchWorkItem?
var isCancelled = false
let callback: (Result<[String: Int64], any Error>) -> Void
init(callback: @escaping (Result<[String: Int64], any Error>) -> Void) {
self.callback = callback
}
}
class ThumbnailApiImpl: ThumbnailApi {
private static let imageManager = PHImageManager.default()
private static let fetchOptions = {
let fetchOptions = PHFetchOptions()
fetchOptions.fetchLimit = 1
fetchOptions.wantsIncrementalChangeDetails = false
return fetchOptions
}()
private static let requestOptions = {
let requestOptions = PHImageRequestOptions()
requestOptions.isNetworkAccessAllowed = true
requestOptions.deliveryMode = .highQualityFormat
requestOptions.resizeMode = .fast
requestOptions.isSynchronous = true
requestOptions.version = .current
return requestOptions
}()
private static let assetQueue = DispatchQueue(label: "thumbnail.assets", qos: .userInitiated)
private static let requestQueue = DispatchQueue(label: "thumbnail.requests", qos: .userInitiated)
private static let cancelQueue = DispatchQueue(label: "thumbnail.cancellation", qos: .default)
private static let processingQueue = DispatchQueue(label: "thumbnail.processing", qos: .userInteractive, attributes: .concurrent)
private static let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
private static let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedLast.rawValue).rawValue
private static var requests = [Int64: Request]()
private static let cancelledResult = Result<[String: Int64], any Error>.success([:])
private static let concurrencySemaphore = DispatchSemaphore(value: ProcessInfo.processInfo.activeProcessorCount * 2)
private static let assetCache = {
let assetCache = NSCache<NSString, PHAsset>()
assetCache.countLimit = 10000
return assetCache
}()
private static let activitySemaphore = DispatchSemaphore(value: 1)
private static let willResignActiveObserver = NotificationCenter.default.addObserver(
forName: UIApplication.willResignActiveNotification,
object: nil,
queue: .main
) { _ in
processingQueue.suspend()
activitySemaphore.wait()
}
private static let didBecomeActiveObserver = NotificationCenter.default.addObserver(
forName: UIApplication.didBecomeActiveNotification,
object: nil,
queue: .main
) { _ in
processingQueue.resume()
activitySemaphore.signal()
}
func getThumbhash(thumbhash: String, completion: @escaping (Result<[String : Int64], any Error>) -> Void) {
Self.processingQueue.async {
guard let data = Data(base64Encoded: thumbhash)
else { return completion(.failure(PigeonError(code: "", message: "Invalid base64 string: \(thumbhash)", details: nil)))}
let (width, height, pointer) = thumbHashToRGBA(hash: data)
self.waitForActiveState()
completion(.success(["pointer": Int64(Int(bitPattern: pointer.baseAddress)), "width": Int64(width), "height": Int64(height)]))
}
}
func requestImage(assetId: String, requestId: Int64, width: Int64, height: Int64, isVideo: Bool, completion: @escaping (Result<[String: Int64], any Error>) -> Void) {
let request = Request(callback: completion)
let item = DispatchWorkItem {
if request.isCancelled {
return completion(Self.cancelledResult)
}
Self.concurrencySemaphore.wait()
defer {
Self.concurrencySemaphore.signal()
}
if request.isCancelled {
return completion(Self.cancelledResult)
}
guard let asset = Self.requestAsset(assetId: assetId)
else {
Self.removeRequest(requestId: requestId)
completion(.failure(PigeonError(code: "", message: "Could not get asset data for \(assetId)", details: nil)))
return
}
if request.isCancelled {
return completion(Self.cancelledResult)
}
var image: UIImage?
Self.imageManager.requestImage(
for: asset,
targetSize: width > 0 && height > 0 ? CGSize(width: Double(width), height: Double(height)) : PHImageManagerMaximumSize,
contentMode: .aspectFill,
options: Self.requestOptions,
resultHandler: { (_image, info) -> Void in
image = _image
}
)
if request.isCancelled {
return completion(Self.cancelledResult)
}
guard let image = image,
let cgImage = image.cgImage else {
Self.removeRequest(requestId: requestId)
return completion(.failure(PigeonError(code: "", message: "Could not get pixel data for \(assetId)", details: nil)))
}
let pointer = UnsafeMutableRawPointer.allocate(
byteCount: Int(cgImage.width) * Int(cgImage.height) * 4,
alignment: MemoryLayout<UInt8>.alignment
)
if request.isCancelled {
pointer.deallocate()
return completion(Self.cancelledResult)
}
guard let context = CGContext(
data: pointer,
width: cgImage.width,
height: cgImage.height,
bitsPerComponent: 8,
bytesPerRow: cgImage.width * 4,
space: Self.rgbColorSpace,
bitmapInfo: Self.bitmapInfo
) else {
pointer.deallocate()
Self.removeRequest(requestId: requestId)
return completion(.failure(PigeonError(code: "", message: "Could not create context for \(assetId)", details: nil)))
}
if request.isCancelled {
pointer.deallocate()
return completion(Self.cancelledResult)
}
context.interpolationQuality = .none
context.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
if request.isCancelled {
pointer.deallocate()
return completion(Self.cancelledResult)
}
self.waitForActiveState()
completion(.success(["pointer": Int64(Int(bitPattern: pointer)), "width": Int64(cgImage.width), "height": Int64(cgImage.height)]))
Self.removeRequest(requestId: requestId)
}
request.workItem = item
Self.addRequest(requestId: requestId, request: request)
Self.processingQueue.async(execute: item)
}
func cancelImageRequest(requestId: Int64) {
Self.cancelRequest(requestId: requestId)
}
private static func addRequest(requestId: Int64, request: Request) -> Void {
requestQueue.sync { requests[requestId] = request }
}
private static func removeRequest(requestId: Int64) -> Void {
requestQueue.sync { requests[requestId] = nil }
}
private static func cancelRequest(requestId: Int64) -> Void {
requestQueue.async {
guard let request = requests.removeValue(forKey: requestId) else { return }
request.isCancelled = true
guard let item = request.workItem else { return }
if item.isCancelled {
cancelQueue.async { request.callback(Self.cancelledResult) }
}
}
}
private static func requestAsset(assetId: String) -> PHAsset? {
var asset: PHAsset?
assetQueue.sync { asset = assetCache.object(forKey: assetId as NSString) }
if asset != nil { return asset }
guard let asset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: Self.fetchOptions).firstObject
else { return nil }
assetQueue.async { assetCache.setObject(asset, forKey: assetId as NSString) }
return asset
}
func waitForActiveState() {
Self.activitySemaphore.wait()
Self.activitySemaphore.signal()
}
}

View File

@@ -0,0 +1,115 @@
import Photos
class AssetRequest: Request {
let assetId: String
var completion: (PHAsset?) -> Void
init(cancellationToken: CancellationToken, assetId: String, completion: @escaping (PHAsset?) -> Void) {
self.assetId = assetId
self.completion = completion
super.init(cancellationToken: cancellationToken)
}
}
class AssetResolver {
private let requestQueue: DispatchQueue
private let processingQueue: DispatchQueue
private var batchTimer: DispatchWorkItem?
private let batchLock = NSLock()
private let batchTimeout: TimeInterval
private let fetchOptions: PHFetchOptions
private var assetRequests = [AssetRequest]()
private let assetCache: NSCache<NSString, PHAsset>
init(
fetchOptions: PHFetchOptions,
batchTimeout: TimeInterval = 0.00025, // 250μs
cacheSize: Int = 10000,
qos: DispatchQoS = .unspecified
) {
self.fetchOptions = fetchOptions
self.batchTimeout = batchTimeout
self.assetCache = NSCache<NSString, PHAsset>()
self.assetCache.countLimit = cacheSize
self.requestQueue = DispatchQueue(label: "assets.requests", qos: qos)
self.processingQueue = DispatchQueue(label: "assets.processing", qos: qos)
}
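// Cache hits complete immediately; misses join the pending batch and re-arm the
// shared timer, so the Photos fetch only fires once no new request has arrived
// for `batchTimeout` (a debounce that coalesces bursts into one query).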
func requestAsset(request: AssetRequest) {
requestQueue.async {
if (request.isCancelled) {
request.completion(nil)
return
}
if let cachedAsset = self.assetCache.object(forKey: request.assetId as NSString) {
request.completion(cachedAsset)
return
}
self.batchLock.lock()
if (request.isCancelled) {
self.batchLock.unlock()
request.completion(nil)
return
}
self.assetRequests.append(request)
self.batchTimer?.cancel()
let timer = DispatchWorkItem(block: self.processBatch)
self.batchTimer = timer
self.batchLock.unlock()
self.processingQueue.asyncAfter(deadline: .now() + self.batchTimeout, execute: timer)
}
}
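// Drains the pending batch under the lock, de-duplicating asset IDs, then makes
// a single fetchAssets call and fans each PHAsset out to every queued
// completion. IDs missing from the fetch result complete with nil.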
private func processBatch() {
batchLock.lock()
if assetRequests.isEmpty {
batchTimer = nil
batchLock.unlock()
return
}
var completionMap = [String: [(PHAsset?) -> Void]]()
var activeAssetIds = [String]()
completionMap.reserveCapacity(assetRequests.count)
activeAssetIds.reserveCapacity(assetRequests.count)
for request in assetRequests {
if (request.isCancelled) {
request.completion(nil)
continue
}
if completionMap[request.assetId] != nil {
// Swift arrays are value types: appending to a local `var` copy would be lost,
// so mutate the dictionary entry in place.
completionMap[request.assetId]!.append(request.completion)
} else {
activeAssetIds.append(request.assetId)
completionMap[request.assetId] = [request.completion]
}
}
assetRequests.removeAll(keepingCapacity: true)
batchTimer = nil
batchLock.unlock()
guard !activeAssetIds.isEmpty else { return }
let assets = PHAsset.fetchAssets(withLocalIdentifiers: activeAssetIds, options: self.fetchOptions)
assets.enumerateObjects { asset, _, _ in
let assetId = asset.localIdentifier
for completion in completionMap.removeValue(forKey: assetId)! {
completion(asset)
}
self.requestQueue.async { self.assetCache.setObject(asset, forKey: assetId as NSString) }
}
for completions in completionMap.values {
for completion in completions {
completion(nil)
}
}
}
}

View File

@@ -0,0 +1,198 @@
import CryptoKit
import Flutter
import MobileCoreServices
import Photos
class ThumbnailRequest: Request {
weak var workItem: DispatchWorkItem?
let completion: (Result<[String: Int64], any Error>) -> Void
init(cancellationToken: CancellationToken, completion: @escaping (Result<[String: Int64], any Error>) -> Void) {
self.completion = completion
super.init(cancellationToken: cancellationToken)
}
}
class ThumbnailResolver: ThumbnailApi {
private static let imageManager = PHImageManager.default()
private static let assetResolver = AssetResolver(fetchOptions: {
let fetchOptions = PHFetchOptions()
fetchOptions.wantsIncrementalChangeDetails = false
return fetchOptions
}(), qos: .userInitiated)
private static let requestOptions = {
let requestOptions = PHImageRequestOptions()
requestOptions.isNetworkAccessAllowed = true
requestOptions.deliveryMode = .highQualityFormat
requestOptions.resizeMode = .fast
requestOptions.isSynchronous = true
requestOptions.version = .current
return requestOptions
}()
private static let requestQueue = DispatchQueue(label: "thumbnail.requests", qos: .userInitiated)
private static let cancelQueue = DispatchQueue(label: "thumbnail.cancellation", qos: .default)
private static let processingQueue = DispatchQueue(label: "thumbnail.processing", qos: .userInteractive, attributes: .concurrent)
private static let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
private static let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedLast.rawValue).rawValue
private static var requests = [Int64: ThumbnailRequest]()
private static let cancelledResult = Result<[String: Int64], any Error>.success([:])
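// Thumbnail decoding gets its own concurrency budget (about half the cores,
// plus one), separate from asset resolution, which AssetResolver now batches.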
private static let thumbnailConcurrencySemaphore = DispatchSemaphore(value: ProcessInfo.processInfo.activeProcessorCount / 2 + 1)
private static let activitySemaphore = DispatchSemaphore(value: 1)
private static let willResignActiveObserver = NotificationCenter.default.addObserver(
forName: UIApplication.willResignActiveNotification,
object: nil,
queue: .main
) { _ in
processingQueue.suspend()
activitySemaphore.wait()
}
private static let didBecomeActiveObserver = NotificationCenter.default.addObserver(
forName: UIApplication.didBecomeActiveNotification,
object: nil,
queue: .main
) { _ in
processingQueue.resume()
activitySemaphore.signal()
}
func getThumbhash(thumbhash: String, completion: @escaping (Result<[String : Int64], any Error>) -> Void) {
Self.processingQueue.async {
guard let data = Data(base64Encoded: thumbhash)
else { return completion(.failure(PigeonError(code: "", message: "Invalid base64 string: \(thumbhash)", details: nil)))}
let (width, height, pointer) = thumbHashToRGBA(hash: data)
self.waitForActiveState()
completion(.success(["pointer": Int64(Int(bitPattern: pointer.baseAddress)), "width": Int64(width), "height": Int64(height)]))
}
}
func requestImage(assetId: String, requestId: Int64, width: Int64, height: Int64, isVideo: Bool, completion: @escaping (Result<[String: Int64], any Error>) -> Void) {
let cancellationToken = CancellationToken()
let thumbnailRequest = ThumbnailRequest(cancellationToken: cancellationToken, completion: completion)
Self.assetResolver.requestAsset(request: AssetRequest(cancellationToken: cancellationToken, assetId: assetId) { asset in
if cancellationToken.isCancelled {
return completion(Self.cancelledResult)
}
let item = DispatchWorkItem {
if cancellationToken.isCancelled {
return completion(Self.cancelledResult)
}
guard let asset = asset else {
if cancellationToken.isCancelled {
return completion(Self.cancelledResult)
}
Self.removeRequest(requestId: requestId)
completion(.failure(PigeonError(code: "", message: "Could not get asset data for \(assetId)", details: nil)))
return
}
Self.thumbnailConcurrencySemaphore.wait()
defer { Self.thumbnailConcurrencySemaphore.signal() }
if cancellationToken.isCancelled {
return completion(Self.cancelledResult)
}
var image: UIImage?
Self.imageManager.requestImage(
for: asset,
targetSize: width > 0 && height > 0 ? CGSize(width: Double(width), height: Double(height)) : PHImageManagerMaximumSize,
contentMode: .aspectFill,
options: Self.requestOptions,
resultHandler: { (_image, info) -> Void in
image = _image
}
)
if cancellationToken.isCancelled {
return completion(Self.cancelledResult)
}
guard let image = image,
let cgImage = image.cgImage else {
Self.removeRequest(requestId: requestId)
return completion(.failure(PigeonError(code: "", message: "Could not get pixel data for \(assetId)", details: nil)))
}
let pointer = UnsafeMutableRawPointer.allocate(
byteCount: Int(cgImage.width) * Int(cgImage.height) * 4,
alignment: MemoryLayout<UInt8>.alignment
)
if cancellationToken.isCancelled {
pointer.deallocate()
return completion(Self.cancelledResult)
}
guard let context = CGContext(
data: pointer,
width: cgImage.width,
height: cgImage.height,
bitsPerComponent: 8,
bytesPerRow: cgImage.width * 4,
space: Self.rgbColorSpace,
bitmapInfo: Self.bitmapInfo
) else {
pointer.deallocate()
Self.removeRequest(requestId: requestId)
return completion(.failure(PigeonError(code: "", message: "Could not create context for \(assetId)", details: nil)))
}
if cancellationToken.isCancelled {
pointer.deallocate()
return completion(Self.cancelledResult)
}
context.interpolationQuality = .none
context.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
if cancellationToken.isCancelled {
pointer.deallocate()
return completion(Self.cancelledResult)
}
self.waitForActiveState()
completion(.success(["pointer": Int64(Int(bitPattern: pointer)), "width": Int64(cgImage.width), "height": Int64(cgImage.height)]))
Self.removeRequest(requestId: requestId)
}
thumbnailRequest.workItem = item
Self.processingQueue.async(execute: item)
})
Self.addRequest(requestId: requestId, request: thumbnailRequest)
}
func cancelImageRequest(requestId: Int64) {
Self.cancelRequest(requestId: requestId)
}
private static func addRequest(requestId: Int64, request: ThumbnailRequest) -> Void {
requestQueue.sync { requests[requestId] = request }
}
private static func removeRequest(requestId: Int64) -> Void {
requestQueue.sync { requests[requestId] = nil }
}
private static func cancelRequest(requestId: Int64) -> Void {
requestQueue.async {
guard let request = requests.removeValue(forKey: requestId) else { return }
request.isCancelled = true
guard let item = request.workItem else { return }
item.cancel()
if item.isCancelled {
cancelQueue.async { request.completion(Self.cancelledResult) }
}
}
}
func waitForActiveState() {
Self.activitySemaphore.wait()
Self.activitySemaphore.signal()
}
}

View File

@@ -0,0 +1,20 @@
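// Shared mutable flag: a ThumbnailRequest and the AssetRequest it spawns hold
// the same token, so cancelling the thumbnail also cancels the pending asset
// resolution.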
class CancellationToken {
var isCancelled = false
}
class Request {
let cancellationToken: CancellationToken
init(cancellationToken: CancellationToken) {
self.cancellationToken = cancellationToken
}
var isCancelled: Bool {
get {
return cancellationToken.isCancelled
}
set(newValue) {
cancellationToken.isCancelled = newValue
}
}
}

View File

@@ -393,7 +393,6 @@ Class | Method | HTTP request | Description
- [LoginCredentialDto](doc//LoginCredentialDto.md)
- [LoginResponseDto](doc//LoginResponseDto.md)
- [LogoutResponseDto](doc//LogoutResponseDto.md)
- [MachineLearningAvailabilityChecksDto](doc//MachineLearningAvailabilityChecksDto.md)
- [ManualJobName](doc//ManualJobName.md)
- [MapMarkerResponseDto](doc//MapMarkerResponseDto.md)
- [MapReverseGeocodeResponseDto](doc//MapReverseGeocodeResponseDto.md)

View File

@@ -164,7 +164,6 @@ part 'model/log_level.dart';
part 'model/login_credential_dto.dart';
part 'model/login_response_dto.dart';
part 'model/logout_response_dto.dart';
part 'model/machine_learning_availability_checks_dto.dart';
part 'model/manual_job_name.dart';
part 'model/map_marker_response_dto.dart';
part 'model/map_reverse_geocode_response_dto.dart';

View File

@@ -382,8 +382,6 @@ class ApiClient {
return LoginResponseDto.fromJson(value);
case 'LogoutResponseDto':
return LogoutResponseDto.fromJson(value);
case 'MachineLearningAvailabilityChecksDto':
return MachineLearningAvailabilityChecksDto.fromJson(value);
case 'ManualJobName':
return ManualJobNameTypeTransformer().decode(value);
case 'MapMarkerResponseDto':

View File

@@ -1,115 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class MachineLearningAvailabilityChecksDto {
/// Returns a new [MachineLearningAvailabilityChecksDto] instance.
MachineLearningAvailabilityChecksDto({
required this.enabled,
required this.interval,
required this.timeout,
});
bool enabled;
num interval;
num timeout;
@override
bool operator ==(Object other) => identical(this, other) || other is MachineLearningAvailabilityChecksDto &&
other.enabled == enabled &&
other.interval == interval &&
other.timeout == timeout;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(enabled.hashCode) +
(interval.hashCode) +
(timeout.hashCode);
@override
String toString() => 'MachineLearningAvailabilityChecksDto[enabled=$enabled, interval=$interval, timeout=$timeout]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'enabled'] = this.enabled;
json[r'interval'] = this.interval;
json[r'timeout'] = this.timeout;
return json;
}
/// Returns a new [MachineLearningAvailabilityChecksDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static MachineLearningAvailabilityChecksDto? fromJson(dynamic value) {
upgradeDto(value, "MachineLearningAvailabilityChecksDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return MachineLearningAvailabilityChecksDto(
enabled: mapValueOfType<bool>(json, r'enabled')!,
interval: num.parse('${json[r'interval']}'),
timeout: num.parse('${json[r'timeout']}'),
);
}
return null;
}
static List<MachineLearningAvailabilityChecksDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <MachineLearningAvailabilityChecksDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = MachineLearningAvailabilityChecksDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, MachineLearningAvailabilityChecksDto> mapFromJson(dynamic json) {
final map = <String, MachineLearningAvailabilityChecksDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = MachineLearningAvailabilityChecksDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of MachineLearningAvailabilityChecksDto-objects as value to a dart map
static Map<String, List<MachineLearningAvailabilityChecksDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<MachineLearningAvailabilityChecksDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = MachineLearningAvailabilityChecksDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'enabled',
'interval',
'timeout',
};
}

View File

@@ -13,16 +13,14 @@ part of openapi.api;
class SystemConfigMachineLearningDto {
/// Returns a new [SystemConfigMachineLearningDto] instance.
SystemConfigMachineLearningDto({
required this.availabilityChecks,
required this.clip,
required this.duplicateDetection,
required this.enabled,
required this.facialRecognition,
this.url,
this.urls = const [],
});
MachineLearningAvailabilityChecksDto availabilityChecks;
CLIPConfig clip;
DuplicateDetectionConfig duplicateDetection;
@@ -31,37 +29,50 @@ class SystemConfigMachineLearningDto {
FacialRecognitionConfig facialRecognition;
/// This property was deprecated in v1.122.0
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? url;
List<String> urls;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigMachineLearningDto &&
other.availabilityChecks == availabilityChecks &&
other.clip == clip &&
other.duplicateDetection == duplicateDetection &&
other.enabled == enabled &&
other.facialRecognition == facialRecognition &&
other.url == url &&
_deepEquality.equals(other.urls, urls);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(availabilityChecks.hashCode) +
(clip.hashCode) +
(duplicateDetection.hashCode) +
(enabled.hashCode) +
(facialRecognition.hashCode) +
(url == null ? 0 : url!.hashCode) +
(urls.hashCode);
@override
String toString() => 'SystemConfigMachineLearningDto[availabilityChecks=$availabilityChecks, clip=$clip, duplicateDetection=$duplicateDetection, enabled=$enabled, facialRecognition=$facialRecognition, urls=$urls]';
String toString() => 'SystemConfigMachineLearningDto[clip=$clip, duplicateDetection=$duplicateDetection, enabled=$enabled, facialRecognition=$facialRecognition, url=$url, urls=$urls]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'availabilityChecks'] = this.availabilityChecks;
json[r'clip'] = this.clip;
json[r'duplicateDetection'] = this.duplicateDetection;
json[r'enabled'] = this.enabled;
json[r'facialRecognition'] = this.facialRecognition;
if (this.url != null) {
json[r'url'] = this.url;
} else {
// json[r'url'] = null;
}
json[r'urls'] = this.urls;
return json;
}
@@ -75,11 +86,11 @@ class SystemConfigMachineLearningDto {
final json = value.cast<String, dynamic>();
return SystemConfigMachineLearningDto(
availabilityChecks: MachineLearningAvailabilityChecksDto.fromJson(json[r'availabilityChecks'])!,
clip: CLIPConfig.fromJson(json[r'clip'])!,
duplicateDetection: DuplicateDetectionConfig.fromJson(json[r'duplicateDetection'])!,
enabled: mapValueOfType<bool>(json, r'enabled')!,
facialRecognition: FacialRecognitionConfig.fromJson(json[r'facialRecognition'])!,
url: mapValueOfType<String>(json, r'url'),
urls: json[r'urls'] is Iterable
? (json[r'urls'] as Iterable).cast<String>().toList(growable: false)
: const [],
@@ -130,7 +141,6 @@ class SystemConfigMachineLearningDto {
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'availabilityChecks',
'clip',
'duplicateDetection',
'enabled',

View File

@@ -12259,25 +12259,6 @@
],
"type": "object"
},
"MachineLearningAvailabilityChecksDto": {
"properties": {
"enabled": {
"type": "boolean"
},
"interval": {
"type": "number"
},
"timeout": {
"type": "number"
}
},
"required": [
"enabled",
"interval",
"timeout"
],
"type": "object"
},
"ManualJobName": {
"enum": [
"person-cleanup",
@@ -16414,9 +16395,6 @@
},
"SystemConfigMachineLearningDto": {
"properties": {
"availabilityChecks": {
"$ref": "#/components/schemas/MachineLearningAvailabilityChecksDto"
},
"clip": {
"$ref": "#/components/schemas/CLIPConfig"
},
@@ -16429,6 +16407,11 @@
"facialRecognition": {
"$ref": "#/components/schemas/FacialRecognitionConfig"
},
"url": {
"deprecated": true,
"description": "This property was deprecated in v1.122.0",
"type": "string"
},
"urls": {
"format": "uri",
"items": {
@@ -16440,7 +16423,6 @@
}
},
"required": [
"availabilityChecks",
"clip",
"duplicateDetection",
"enabled",

View File

@@ -1383,11 +1383,6 @@ export type SystemConfigLoggingDto = {
enabled: boolean;
level: LogLevel;
};
export type MachineLearningAvailabilityChecksDto = {
enabled: boolean;
interval: number;
timeout: number;
};
export type ClipConfig = {
enabled: boolean;
modelName: string;
@@ -1404,11 +1399,12 @@ export type FacialRecognitionConfig = {
modelName: string;
};
export type SystemConfigMachineLearningDto = {
availabilityChecks: MachineLearningAvailabilityChecksDto;
clip: ClipConfig;
duplicateDetection: DuplicateDetectionConfig;
enabled: boolean;
facialRecognition: FacialRecognitionConfig;
/** This property was deprecated in v1.122.0 */
url?: string;
urls: string[];
};
export type SystemConfigMapDto = {

View File

@@ -3,7 +3,7 @@
"version": "0.0.1",
"description": "Monorepo for Immich",
"private": true,
"packageManager": "pnpm@10.15.1+sha512.34e538c329b5553014ca8e8f4535997f96180a1d0f614339357449935350d924e22f8614682191264ec33d1462ac21561aff97f6bb18065351c162c7e8f6de67",
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
"engines": {
"pnpm": ">=10.0.0"
}

pnpm-lock.yaml (generated, 2604 changed lines): diff suppressed because it is too large.

View File

@@ -1,4 +1,4 @@
FROM ghcr.io/immich-app/base-server-dev:pr-272 AS builder
FROM ghcr.io/immich-app/base-server-dev:202509091104@sha256:4f9275330f1e49e7ce9840758ea91839052fe6ed40972d5bb97a9af857fa956a AS builder
ENV COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
CI=1 \
COREPACK_HOME=/tmp
@@ -33,7 +33,7 @@ RUN pnpm --filter @immich/sdk --filter @immich/cli --frozen-lockfile install &&
pnpm --filter @immich/sdk --filter @immich/cli build && \
pnpm --filter @immich/cli --prod --no-optional deploy /output/cli-pruned
FROM ghcr.io/immich-app/base-server-prod:pr-272
FROM ghcr.io/immich-app/base-server-prod:202509091104@sha256:d1ccbac24c84f2f8277cf85281edfca62d85d7daed6a62b8efd3a81bcd3c5e0e
WORKDIR /usr/src/app
ENV NODE_ENV=production \

View File

@@ -44,14 +44,14 @@
"@nestjs/websockets": "^11.0.4",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/context-async-hooks": "^2.0.0",
"@opentelemetry/exporter-prometheus": "^0.205.0",
"@opentelemetry/instrumentation-http": "^0.205.0",
"@opentelemetry/instrumentation-ioredis": "^0.53.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.51.0",
"@opentelemetry/instrumentation-pg": "^0.58.0",
"@opentelemetry/exporter-prometheus": "^0.203.0",
"@opentelemetry/instrumentation-http": "^0.203.0",
"@opentelemetry/instrumentation-ioredis": "^0.51.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.49.0",
"@opentelemetry/instrumentation-pg": "^0.56.0",
"@opentelemetry/resources": "^2.0.1",
"@opentelemetry/sdk-metrics": "^2.0.1",
"@opentelemetry/sdk-node": "^0.205.0",
"@opentelemetry/sdk-node": "^0.203.0",
"@opentelemetry/semantic-conventions": "^1.34.0",
"@react-email/components": "^0.5.0",
"@react-email/render": "^1.1.2",

View File

@@ -15,7 +15,6 @@ import { repositories } from 'src/repositories';
import { AccessRepository } from 'src/repositories/access.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { AuthService } from 'src/services/auth.service';
import { getKyselyConfig } from 'src/utils/database';
@@ -58,7 +57,7 @@ class SqlGenerator {
try {
await this.setup();
for (const Repository of repositories) {
if (Repository === LoggingRepository || Repository === MachineLearningRepository) {
if (Repository === LoggingRepository) {
continue;
}
await this.process(Repository);

View File

@@ -54,11 +54,6 @@ export interface SystemConfig {
machineLearning: {
enabled: boolean;
urls: string[];
availabilityChecks: {
enabled: boolean;
timeout: number;
interval: number;
};
clip: {
enabled: boolean;
modelName: string;
@@ -181,8 +176,6 @@ export interface SystemConfig {
};
}
export type MachineLearningConfig = SystemConfig['machineLearning'];
export const defaults = Object.freeze<SystemConfig>({
backup: {
database: {
@@ -234,11 +227,6 @@ export const defaults = Object.freeze<SystemConfig>({
machineLearning: {
enabled: process.env.IMMICH_MACHINE_LEARNING_ENABLED !== 'false',
urls: [process.env.IMMICH_MACHINE_LEARNING_URL || 'http://immich-machine-learning:3003'],
availabilityChecks: {
enabled: true,
timeout: Number(process.env.IMMICH_MACHINE_LEARNING_PING_TIMEOUT) || 2000,
interval: 30_000,
},
clip: {
enabled: true,
modelName: 'ViT-B-32__openai',

View File

@@ -51,6 +51,11 @@ export const serverVersion = new SemVer(version);
export const AUDIT_LOG_MAX_DURATION = Duration.fromObject({ days: 100 });
export const ONE_HOUR = Duration.fromObject({ hours: 1 });
export const MACHINE_LEARNING_PING_TIMEOUT = Number(process.env.MACHINE_LEARNING_PING_TIMEOUT || 2000);
export const MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME = Number(
process.env.MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME || 30_000,
);
export const citiesFile = 'cities500.txt';
export const reverseGeocodeMaxDistance = 25_000;

View File

@@ -6,7 +6,7 @@ import { PropertyLifecycle } from 'src/decorators';
import { AlbumResponseDto } from 'src/dtos/album.dto';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AssetOrder, AssetType, AssetVisibility } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate, ValidateEnum, ValidateString, ValidateUUID } from 'src/validation';
import { Optional, ValidateBoolean, ValidateDate, ValidateEnum, ValidateUUID } from 'src/validation';
class BaseSearchDto {
@ValidateUUID({ optional: true, nullable: true })
@@ -144,7 +144,9 @@ export class MetadataSearchDto extends RandomSearchDto {
@Optional()
deviceAssetId?: string;
@ValidateString({ optional: true, trim: true })
@IsString()
@IsNotEmpty()
@Optional()
description?: string;
@IsString()
@@ -152,7 +154,9 @@ export class MetadataSearchDto extends RandomSearchDto {
@Optional()
checksum?: string;
@ValidateString({ optional: true, trim: true })
@IsString()
@IsNotEmpty()
@Optional()
originalFileName?: string;
@IsString()
@@ -186,12 +190,16 @@ export class MetadataSearchDto extends RandomSearchDto {
}
export class StatisticsSearchDto extends BaseSearchDto {
@ValidateString({ optional: true, trim: true })
@IsString()
@IsNotEmpty()
@Optional()
description?: string;
}
export class SmartSearchDto extends BaseSearchWithResultsDto {
@ValidateString({ optional: true, trim: true })
@IsString()
@IsNotEmpty()
@Optional()
query?: string;
@ValidateUUID({ optional: true })

View File

@@ -1,5 +1,5 @@
import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { Exclude, Transform, Type } from 'class-transformer';
import {
ArrayMinSize,
IsInt,
@@ -15,6 +15,7 @@ import {
ValidateNested,
} from 'class-validator';
import { SystemConfig } from 'src/config';
import { PropertyLifecycle } from 'src/decorators';
import { CLIPConfig, DuplicateDetectionConfig, FacialRecognitionConfig } from 'src/dtos/model-config.dto';
import {
AudioCodec,
@@ -256,32 +257,21 @@ class SystemConfigLoggingDto {
level!: LogLevel;
}
class MachineLearningAvailabilityChecksDto {
@ValidateBoolean()
enabled!: boolean;
@IsInt()
timeout!: number;
@IsInt()
interval!: number;
}
class SystemConfigMachineLearningDto {
@ValidateBoolean()
enabled!: boolean;
@PropertyLifecycle({ deprecatedAt: 'v1.122.0' })
@Exclude()
url?: string;
@IsUrl({ require_tld: false, allow_underscores: true }, { each: true })
@ArrayMinSize(1)
@Transform(({ obj, value }) => (obj.url ? [obj.url] : value))
@ValidateIf((dto) => dto.enabled)
@ApiProperty({ type: 'array', items: { type: 'string', format: 'uri' }, minItems: 1 })
urls!: string[];
@Type(() => MachineLearningAvailabilityChecksDto)
@ValidateNested()
@IsObject()
availabilityChecks!: MachineLearningAvailabilityChecksDto;
@Type(() => CLIPConfig)
@ValidateNested()
@IsObject()

View File

@@ -142,10 +142,6 @@ export class LoggingRepository {
this.handleMessage(LogLevel.Fatal, message, details);
}
deprecate(message: string) {
this.warn(`[Deprecated] ${message}`);
}
private handleFunction(level: LogLevel, message: LogFunction, details: LogDetails[]) {
if (this.logger.isLevelEnabled(level)) {
this.handleMessage(level, message(), details);

View File

@@ -1,7 +1,6 @@
import { Injectable } from '@nestjs/common';
import { Duration } from 'luxon';
import { readFile } from 'node:fs/promises';
import { MachineLearningConfig } from 'src/config';
import { MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME, MACHINE_LEARNING_PING_TIMEOUT } from 'src/constants';
import { CLIPConfig } from 'src/dtos/model-config.dto';
import { LoggingRepository } from 'src/repositories/logging.repository';
@@ -58,100 +57,82 @@ export type TextEncodingOptions = ModelOptions & { language?: string };
@Injectable()
export class MachineLearningRepository {
private healthyMap: Record<string, boolean> = {};
private interval?: ReturnType<typeof setInterval>;
private _config?: MachineLearningConfig;
private get config(): MachineLearningConfig {
if (!this._config) {
throw new Error('Machine learning repository has not been set up');
}
return this._config;
}
// Note that deleted URLs are not removed from this map (i.e., they're leaked).
// Cleaning them up is low priority since there should be very few over a
// typical server uptime cycle.
private urlAvailability: {
[url: string]:
| {
active: boolean;
lastChecked: number;
}
| undefined;
};
constructor(private logger: LoggingRepository) {
this.logger.setContext(MachineLearningRepository.name);
this.urlAvailability = {};
}
setup(config: MachineLearningConfig) {
this._config = config;
this.teardown();
// delete old servers
for (const url of Object.keys(this.healthyMap)) {
if (!config.urls.includes(url)) {
delete this.healthyMap[url];
}
private setUrlAvailability(url: string, active: boolean) {
const current = this.urlAvailability[url];
if (current?.active !== active) {
this.logger.verbose(`Setting ${url} ML server to ${active ? 'active' : 'inactive'}.`);
}
if (!config.availabilityChecks.enabled) {
return;
}
this.tick();
this.interval = setInterval(
() => this.tick(),
Duration.fromObject({ milliseconds: config.availabilityChecks.interval }).as('milliseconds'),
);
this.urlAvailability[url] = {
active,
lastChecked: Date.now(),
};
}
teardown() {
if (this.interval) {
clearInterval(this.interval);
}
}
private tick() {
for (const url of this.config.urls) {
void this.check(url);
}
}
private async check(url: string) {
let healthy = false;
private async checkAvailability(url: string) {
let active = false;
try {
const response = await fetch(new URL('/ping', url), {
signal: AbortSignal.timeout(this.config.availabilityChecks.timeout),
signal: AbortSignal.timeout(MACHINE_LEARNING_PING_TIMEOUT),
});
if (response.ok) {
healthy = true;
}
active = response.ok;
} catch {
// nothing to do here
}
this.setHealthy(url, healthy);
this.setUrlAvailability(url, active);
return active;
}
private setHealthy(url: string, healthy: boolean) {
if (this.healthyMap[url] !== healthy) {
this.logger.log(`Machine learning server became ${healthy ? 'healthy' : 'unhealthy'} (${url}).`);
private async shouldSkipUrl(url: string) {
const availability = this.urlAvailability[url];
if (availability === undefined) {
// If this is a new endpoint, then check inline and skip if it fails
if (!(await this.checkAvailability(url))) {
return true;
}
return false;
}
this.healthyMap[url] = healthy;
}
private isHealthy(url: string) {
if (!this.config.availabilityChecks.enabled) {
if (!availability.active && Date.now() - availability.lastChecked < MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME) {
// If this is an old inactive endpoint that hasn't been checked in a
// while then check but don't wait for the result, just skip it
// This avoids delays on every search whilst allowing higher priority
// ML servers to recover over time.
void this.checkAvailability(url);
return true;
}
return this.healthyMap[url];
return false;
}
private async predict<T>(payload: ModelPayload, config: MachineLearningRequest): Promise<T> {
private async predict<T>(urls: string[], payload: ModelPayload, config: MachineLearningRequest): Promise<T> {
const formData = await this.getFormData(payload, config);
let urlCounter = 0;
for (const url of urls) {
urlCounter++;
const isLast = urlCounter >= urls.length;
if (!isLast && (await this.shouldSkipUrl(url))) {
continue;
}
for (const url of [
// try healthy servers first
...this.config.urls.filter((url) => this.isHealthy(url)),
...this.config.urls.filter((url) => !this.isHealthy(url)),
]) {
try {
const response = await fetch(new URL('/predict', url), { method: 'POST', body: formData });
if (response.ok) {
this.setHealthy(url, true);
this.setUrlAvailability(url, true);
return response.json();
}
@@ -163,21 +144,20 @@ export class MachineLearningRepository {
`Machine learning request to "${url}" failed: ${error instanceof Error ? error.message : error}`,
);
}
this.setHealthy(url, false);
this.setUrlAvailability(url, false);
}
throw new Error(`Machine learning request '${JSON.stringify(config)}' failed for all URLs`);
}
async detectFaces(imagePath: string, { modelName, minScore }: FaceDetectionOptions) {
async detectFaces(urls: string[], imagePath: string, { modelName, minScore }: FaceDetectionOptions) {
const request = {
[ModelTask.FACIAL_RECOGNITION]: {
[ModelType.DETECTION]: { modelName, options: { minScore } },
[ModelType.RECOGNITION]: { modelName },
},
};
const response = await this.predict<FacialRecognitionResponse>({ imagePath }, request);
const response = await this.predict<FacialRecognitionResponse>(urls, { imagePath }, request);
return {
imageHeight: response.imageHeight,
imageWidth: response.imageWidth,
@@ -185,15 +165,15 @@ export class MachineLearningRepository {
};
}
async encodeImage(imagePath: string, { modelName }: CLIPConfig) {
async encodeImage(urls: string[], imagePath: string, { modelName }: CLIPConfig) {
const request = { [ModelTask.SEARCH]: { [ModelType.VISUAL]: { modelName } } };
const response = await this.predict<ClipVisualResponse>({ imagePath }, request);
const response = await this.predict<ClipVisualResponse>(urls, { imagePath }, request);
return response[ModelTask.SEARCH];
}
async encodeText(text: string, { language, modelName }: TextEncodingOptions) {
async encodeText(urls: string[], text: string, { language, modelName }: TextEncodingOptions) {
const request = { [ModelTask.SEARCH]: { [ModelType.TEXTUAL]: { modelName, options: { language } } } };
const response = await this.predict<ClipTextualResponse>({ text }, request);
const response = await this.predict<ClipTextualResponse>(urls, { text }, request);
return response[ModelTask.SEARCH];
}
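
In place of the removed healthy-first ordering, predict() now walks the configured URLs in order and consults shouldSkipUrl() for every server except the last, so a request is never rejected without at least one network attempt. A condensed sketch of that selection policy, assuming the names in this diff; tryUrlsInOrder() and run() are hypothetical stand-ins for predict() and the POST to /predict:

// Skip servers that are inside their offline backoff window, but always try
// the last URL as a final fallback.
async function tryUrlsInOrder<T>(
  urls: string[],
  shouldSkipUrl: (url: string) => Promise<boolean>,
  run: (url: string) => Promise<T>,
): Promise<T> {
  for (const [i, url] of urls.entries()) {
    const isLast = i === urls.length - 1;
    if (!isLast && (await shouldSkipUrl(url))) {
      continue; // offline and recently checked; a background re-check may already be queued
    }
    try {
      return await run(url); // success marks the URL available again
    } catch {
      // mark the URL unavailable and fall through to the next one
    }
  }
  throw new Error('Machine learning request failed for all URLs');
}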

View File

@@ -729,6 +729,7 @@ describe(PersonService.name, () => {
mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] });
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(mocks.machineLearning.detectFaces).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({ minScore: 0.7, modelName: 'buffalo_l' }),
);

View File

@@ -316,6 +316,7 @@ export class PersonService extends BaseService {
}
const { imageHeight, imageWidth, faces } = await this.machineLearningRepository.detectFaces(
machineLearning.urls,
previewFile.path,
machineLearning.facialRecognition,
);

View File

@@ -211,6 +211,7 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test' });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ modelName: expect.any(String) }),
);
@@ -224,6 +225,7 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test', page: 2, size: 50 });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ modelName: expect.any(String) }),
);
@@ -241,6 +243,7 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test' });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ modelName: 'ViT-B-16-SigLIP__webli' }),
);
@@ -250,6 +253,7 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test', language: 'de' });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ language: 'de' }),
);

View File

@@ -118,7 +118,7 @@ export class SearchService extends BaseService {
const key = machineLearning.clip.modelName + dto.query + dto.language;
embedding = this.embeddingCache.get(key);
if (!embedding) {
embedding = await this.machineLearningRepository.encodeText(dto.query, {
embedding = await this.machineLearningRepository.encodeText(machineLearning.urls, dto.query, {
modelName: machineLearning.clip.modelName,
language: dto.language,
});

View File

@@ -205,6 +205,7 @@ describe(SmartInfoService.name, () => {
expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.Success);
expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({ modelName: 'ViT-B-32__openai' }),
);
@@ -241,6 +242,7 @@ describe(SmartInfoService.name, () => {
expect(mocks.database.wait).toHaveBeenCalledWith(512);
expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({ modelName: 'ViT-B-32__openai' }),
);

View File

@@ -108,7 +108,11 @@ export class SmartInfoService extends BaseService {
return JobStatus.Skipped;
}
const embedding = await this.machineLearningRepository.encodeImage(asset.files[0].path, machineLearning.clip);
const embedding = await this.machineLearningRepository.encodeImage(
machineLearning.urls,
asset.files[0].path,
machineLearning.clip,
);
if (this.databaseRepository.isBusy(DatabaseLock.CLIPDimSize)) {
this.logger.verbose(`Waiting for CLIP dimension size to be updated`);

View File

@@ -82,11 +82,6 @@ const updatedConfig = Object.freeze<SystemConfig>({
machineLearning: {
enabled: true,
urls: ['http://immich-machine-learning:3003'],
availabilityChecks: {
enabled: true,
interval: 30_000,
timeout: 2000,
},
clip: {
enabled: true,
modelName: 'ViT-B-32__openai',

View File

@@ -16,20 +16,6 @@ export class SystemConfigService extends BaseService {
async onBootstrap() {
const config = await this.getConfig({ withCache: false });
await this.eventRepository.emit('ConfigInit', { newConfig: config });
if (
process.env.IMMICH_MACHINE_LEARNING_PING_TIMEOUT ||
process.env.IMMICH_MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME
) {
this.logger.deprecate(
'IMMICH_MACHINE_LEARNING_PING_TIMEOUT and IMMICH_MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME have been moved to system config (`machineLearning.availabilityChecks`) and will be removed in a future release.',
);
}
}
@OnEvent({ name: 'AppShutdown' })
onShutdown() {
this.machineLearningRepository.teardown();
}
async getSystemConfig(): Promise<SystemConfigDto> {
@@ -42,14 +28,12 @@ export class SystemConfigService extends BaseService {
}
@OnEvent({ name: 'ConfigInit', priority: -100 })
onConfigInit({ newConfig: { logging, machineLearning } }: ArgOf<'ConfigInit'>) {
onConfigInit({ newConfig: { logging } }: ArgOf<'ConfigInit'>) {
const { logLevel: envLevel } = this.configRepository.getEnv();
const configLevel = logging.enabled ? logging.level : false;
const level = envLevel ?? configLevel;
this.logger.setLogLevel(level);
this.logger.log(`LogLevel=${level} ${envLevel ? '(set via IMMICH_LOG_LEVEL)' : '(set via system config)'}`);
this.machineLearningRepository.setup(machineLearning);
}
@OnEvent({ name: 'ConfigUpdate', server: true })

View File

@@ -211,18 +211,6 @@ export const ValidateDate = (options?: DateOptions & ApiPropertyOptions) => {
return applyDecorators(...decorators);
};
type StringOptions = { optional?: boolean; nullable?: boolean; trim?: boolean };
export const ValidateString = (options?: StringOptions & ApiPropertyOptions) => {
const { optional, nullable, trim, ...apiPropertyOptions } = options || {};
const decorators = [ApiProperty(apiPropertyOptions), IsString(), optional ? Optional({ nullable }) : IsNotEmpty()];
if (trim) {
decorators.push(Transform(({ value }: { value: string }) => value?.trim()));
}
return applyDecorators(...decorators);
};
type BooleanOptions = { optional?: boolean; nullable?: boolean };
export const ValidateBoolean = (options?: BooleanOptions & ApiPropertyOptions) => {
const { optional, nullable, ...apiPropertyOptions } = options || {};
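For context, `ValidateString` composes `class-validator` checks with an optional trim transform. A hypothetical DTO applying it (not taken from the diff):

```ts
// Hypothetical DTO showing how the helper above is used.
class ExampleDto {
  // required: IsString() + IsNotEmpty(), with surrounding whitespace trimmed
  @ValidateString({ trim: true })
  name!: string;

  // optional and nullable: Optional({ nullable: true }) replaces IsNotEmpty()
  @ValidateString({ optional: true, nullable: true })
  description?: string | null;
}
```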

View File

@@ -127,7 +127,6 @@ export default typescriptEslint.config(
'@typescript-eslint/no-misused-promises': 'error',
'@typescript-eslint/require-await': 'error',
'object-shorthand': ['error', 'always'],
'svelte/no-navigation-without-resolve': 'off',
},
},
{

View File

@@ -55,7 +55,7 @@
"qrcode": "^1.5.4",
"simple-icons": "^15.15.0",
"socket.io-client": "~4.8.0",
"svelte-gestures": "5.1.4",
"svelte-gestures": "^5.1.3",
"svelte-i18n": "^4.0.1",
"svelte-maplibre": "^1.2.0",
"svelte-persisted-store": "^0.12.0",
@@ -70,7 +70,7 @@
"@sveltejs/adapter-static": "^3.0.8",
"@sveltejs/enhanced-img": "^0.8.0",
"@sveltejs/kit": "^2.27.1",
"@sveltejs/vite-plugin-svelte": "6.2.0",
"@sveltejs/vite-plugin-svelte": "6.1.2",
"@tailwindcss/vite": "^4.1.7",
"@testing-library/jest-dom": "^6.4.2",
"@testing-library/svelte": "^5.2.8",
@@ -85,7 +85,7 @@
"dotenv": "^17.0.0",
"eslint": "^9.18.0",
"eslint-config-prettier": "^10.1.8",
"eslint-p": "^0.26.0",
"eslint-p": "^0.25.0",
"eslint-plugin-compat": "^6.0.2",
"eslint-plugin-svelte": "^3.9.0",
"eslint-plugin-unicorn": "^60.0.0",
@@ -97,7 +97,7 @@
"prettier-plugin-sort-json": "^4.1.1",
"prettier-plugin-svelte": "^3.3.3",
"rollup-plugin-visualizer": "^6.0.0",
"svelte": "5.38.10",
"svelte": "5.35.5",
"svelte-check": "^4.1.5",
"svelte-eslint-parser": "^1.2.0",
"tailwindcss": "^4.1.7",

View File

@@ -9,7 +9,7 @@
import { featureFlags } from '$lib/stores/server-config.store';
import type { SystemConfigDto } from '@immich/sdk';
import { Button, IconButton } from '@immich/ui';
import { mdiPlus, mdiTrashCanOutline } from '@mdi/js';
import { mdiMinusCircle } from '@mdi/js';
import { isEqual } from 'lodash-es';
import { t } from 'svelte-i18n';
import { fade } from 'svelte/transition';
@@ -46,6 +46,19 @@
<div>
{#each config.machineLearning.urls as _, i (i)}
{#snippet removeButton()}
{#if config.machineLearning.urls.length > 1}
<IconButton
size="large"
shape="round"
color="danger"
aria-label=""
onclick={() => config.machineLearning.urls.splice(i, 1)}
icon={mdiMinusCircle}
/>
{/if}
{/snippet}
<SettingInputField
inputType={SettingInputFieldType.TEXT}
label={i === 0 ? $t('url') : undefined}
@@ -54,69 +67,20 @@
required={i === 0}
disabled={disabled || !config.machineLearning.enabled}
isEdited={i === 0 && !isEqual(config.machineLearning.urls, savedConfig.machineLearning.urls)}
>
{#snippet trailingSnippet()}
{#if config.machineLearning.urls.length > 1}
<IconButton
aria-label=""
onclick={() => config.machineLearning.urls.splice(i, 1)}
icon={mdiTrashCanOutline}
color="danger"
/>
{/if}
{/snippet}
</SettingInputField>
trailingSnippet={removeButton}
/>
{/each}
</div>
<div class="flex justify-end">
<Button
class="mb-2"
size="small"
shape="round"
leadingIcon={mdiPlus}
onclick={() => config.machineLearning.urls.push('')}
disabled={disabled || !config.machineLearning.enabled}>{$t('add_url')}</Button
>
</div>
<Button
class="mb-2"
size="small"
shape="round"
onclick={() => config.machineLearning.urls.splice(0, 0, '')}
disabled={disabled || !config.machineLearning.enabled}>{$t('add_url')}</Button
>
</div>
<SettingAccordion
key="availability-checks"
title={$t('admin.machine_learning_availability_checks')}
subtitle={$t('admin.machine_learning_availability_checks_description')}
>
<div class="ms-4 mt-4 flex flex-col gap-4">
<SettingSwitch
title={$t('admin.machine_learning_availability_checks_enabled')}
bind:checked={config.machineLearning.availabilityChecks.enabled}
disabled={disabled || !config.machineLearning.enabled}
/>
<hr />
<SettingInputField
inputType={SettingInputFieldType.NUMBER}
label={$t('admin.machine_learning_availability_checks_interval')}
bind:value={config.machineLearning.availabilityChecks.interval}
description={$t('admin.machine_learning_availability_checks_interval_description')}
disabled={disabled || !config.machineLearning.enabled || !config.machineLearning.availabilityChecks.enabled}
isEdited={config.machineLearning.availabilityChecks.interval !==
savedConfig.machineLearning.availabilityChecks.interval}
/>
<SettingInputField
inputType={SettingInputFieldType.NUMBER}
label={$t('admin.machine_learning_availability_checks_timeout')}
bind:value={config.machineLearning.availabilityChecks.timeout}
description={$t('admin.machine_learning_availability_checks_timeout_description')}
disabled={disabled || !config.machineLearning.enabled || !config.machineLearning.availabilityChecks.enabled}
isEdited={config.machineLearning.availabilityChecks.timeout !==
savedConfig.machineLearning.availabilityChecks.timeout}
/>
</div>
</SettingAccordion>
<SettingAccordion
key="smart-search"
title={$t('admin.machine_learning_smart_search')}

View File

@@ -1,5 +1,4 @@
<script lang="ts">
import { resolve } from '$app/paths';
import SupportedDatetimePanel from '$lib/components/admin-settings/SupportedDatetimePanel.svelte';
import SupportedVariablesPanel from '$lib/components/admin-settings/SupportedVariablesPanel.svelte';
import SettingButtonsRow from '$lib/components/shared-components/settings/setting-buttons-row.svelte';
@@ -263,7 +262,7 @@
values={{ job: $t('admin.storage_template_migration_job') }}
>
{#snippet children({ message })}
<a href={resolve(AppRoute.ADMIN_JOBS)} class="text-primary">
<a href={AppRoute.ADMIN_JOBS} class="text-primary">
{message}
</a>
{/snippet}

View File

@@ -1,5 +1,4 @@
<script lang="ts">
import { resolve } from '$app/paths';
import AlbumCard from '$lib/components/album-page/album-card.svelte';
import { AppRoute } from '$lib/constants';
import { albumViewSettings } from '$lib/stores/preferences.store';
@@ -66,7 +65,7 @@
{#each albums as album, index (album.id)}
<a
data-sveltekit-preload-data="hover"
href={resolve(`${AppRoute.ALBUMS}/${album.id}`)}
href="{AppRoute.ALBUMS}/{album.id}"
animate:flip={{ duration: 400 }}
oncontextmenu={(event) => oncontextmenu(event, album)}
>
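This and the surrounding hunks toggle between raw template-string hrefs and SvelteKit's `resolve` from `$app/paths`. The practical difference is base-path handling; a hedged sketch, assuming a non-empty configured `base`:

```ts
import { resolve } from '$app/paths';

// With base = '/immich', resolve() prefixes it; the raw string does not.
const albumId = '1234';
const href = resolve(`/albums/${albumId}`); // '/immich/albums/1234'
const raw = `/albums/${albumId}`; // '/albums/1234' — breaks under a base path
```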

View File

@@ -1,6 +1,5 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import AlbumCardGroup from '$lib/components/album-page/album-card-group.svelte';
import AlbumsTable from '$lib/components/album-page/albums-table.svelte';
import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
@@ -316,7 +315,7 @@
button: {
text: $t('view_album'),
onClick() {
return goto(resolve(`${AppRoute.ALBUMS}/${album.id}`));
return goto(`${AppRoute.ALBUMS}/${album.id}`);
},
},
});

View File

@@ -1,6 +1,5 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import { AppRoute, dateFormats } from '$lib/constants';
import { locale } from '$lib/stores/preferences.store';
import { user } from '$lib/stores/user.store';
@@ -33,7 +32,7 @@
<tr
class="flex h-[50px] w-full place-items-center border-[3px] border-transparent p-2 text-center even:bg-subtle/20 odd:bg-subtle/80 hover:cursor-pointer hover:border-immich-primary/75 odd:dark:bg-immich-dark-gray/75 even:dark:bg-immich-dark-gray/50 dark:hover:border-immich-dark-primary/75 md:p-5"
onclick={() => goto(resolve(`${AppRoute.ALBUMS}/${album.id}`))}
onclick={() => goto(`${AppRoute.ALBUMS}/${album.id}`)}
{oncontextmenu}
>
<td class="text-md text-ellipsis text-start w-8/12 sm:w-4/12 md:w-4/12 xl:w-[30%] 2xl:w-[40%] items-center">

View File

@@ -1,5 +1,4 @@
<script lang="ts">
import { resolve } from '$app/paths';
import { autoGrowHeight } from '$lib/actions/autogrow';
import { shortcut } from '$lib/actions/shortcut';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
@@ -147,10 +146,7 @@
<div class="w-full leading-4 overflow-hidden self-center break-words text-sm">{reaction.comment}</div>
{#if assetId === undefined && reaction.assetId}
<a
class="aspect-square w-[75px] h-[75px]"
href={resolve(`${AppRoute.ALBUMS}/${albumId}/photos/${reaction.assetId}`)}
>
<a class="aspect-square w-[75px] h-[75px]" href="{AppRoute.ALBUMS}/{albumId}/photos/{reaction.assetId}">
<img
class="rounded-lg w-[75px] h-[75px] object-cover"
src={getAssetThumbnailUrl(reaction.assetId)}
@@ -202,7 +198,7 @@
{#if assetId === undefined && reaction.assetId}
<a
class="aspect-square w-[75px] h-[75px]"
href={resolve(`${AppRoute.ALBUMS}/${albumId}/photos/${reaction.assetId}`)}
href="{AppRoute.ALBUMS}/{albumId}/photos/{reaction.assetId}"
>
<img
class="rounded-lg w-[75px] h-[75px] object-cover"

View File

@@ -1,6 +1,5 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import CastButton from '$lib/cast/cast-button.svelte';
import type { OnAction, PreAction } from '$lib/components/asset-viewer/actions/action';
import AddToAlbumAction from '$lib/components/asset-viewer/actions/add-to-album-action.svelte';
@@ -225,15 +224,14 @@
{#if !asset.isArchived && !asset.isTrashed}
<MenuOption
icon={mdiImageSearch}
onClick={() => goto(resolve(`${AppRoute.PHOTOS}?at=${stack?.primaryAssetId ?? asset.id}`))}
onClick={() => goto(`${AppRoute.PHOTOS}?at=${stack?.primaryAssetId ?? asset.id}`)}
text={$t('view_in_timeline')}
/>
{/if}
{#if !asset.isArchived && !asset.isTrashed && smartSearchEnabled}
<MenuOption
icon={mdiCompare}
onClick={() =>
goto(resolve(`${AppRoute.SEARCH}?query={"queryAssetId":"${stack?.primaryAssetId ?? asset.id}"}`))}
onClick={() => goto(`${AppRoute.SEARCH}?query={"queryAssetId":"${stack?.primaryAssetId ?? asset.id}"}`)}
text={$t('view_similar_photos')}
/>
{/if}

View File

@@ -1,5 +1,4 @@
<script lang="ts">
import { resolve } from '$app/paths';
import { shortcut } from '$lib/actions/shortcut';
import { AppRoute } from '$lib/constants';
import { authManager } from '$lib/managers/auth-manager.svelte';
@@ -46,7 +45,7 @@
<div class="flex group transition-all">
<a
class="inline-block h-min whitespace-nowrap ps-3 pe-1 group-hover:ps-3 py-1 text-center align-baseline leading-none text-gray-100 dark:text-immich-dark-gray bg-primary rounded-s-full hover:bg-immich-primary/80 dark:hover:bg-immich-dark-primary/80 transition-all"
href={resolve(`${AppRoute.TAGS}/?path=${encodeURI(tag.value)}`)}
href={encodeURI(`${AppRoute.TAGS}/?path=${tag.value}`)}
>
<p class="text-sm">
{tag.value}

View File

@@ -1,6 +1,5 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import DetailPanelDescription from '$lib/components/asset-viewer/detail-panel-description.svelte';
import DetailPanelLocation from '$lib/components/asset-viewer/detail-panel-location.svelte';
import DetailPanelRating from '$lib/components/asset-viewer/detail-panel-star-rating.svelte';
@@ -209,11 +208,9 @@
{#if showingHiddenPeople || !person.isHidden}
<a
class="w-[90px]"
href={resolve(
`${AppRoute.PEOPLE}/${person.id}?${QueryParameter.PREVIOUS_ROUTE}=${
currentAlbum?.id ? `${AppRoute.ALBUMS}/${currentAlbum?.id}` : AppRoute.PHOTOS
}`,
)}
href="{AppRoute.PEOPLE}/{person.id}?{QueryParameter.PREVIOUS_ROUTE}={currentAlbum?.id
? `${AppRoute.ALBUMS}/${currentAlbum?.id}`
: AppRoute.PHOTOS}"
onfocus={() => ($boundingBoxesArray = people[index].faces)}
onblur={() => ($boundingBoxesArray = [])}
onmouseover={() => ($boundingBoxesArray = people[index].faces)}
@@ -365,7 +362,6 @@
</p>
{#if showAssetPath}
<p class="text-xs opacity-50 break-all pb-2 hover:text-primary" transition:slide={{ duration: 250 }}>
<!-- eslint-disable-next-line svelte/no-navigation-without-resolve this is supposed to be treated as an absolute/external link -->
<a href={getAssetFolderHref(asset)} title={$t('go_to_folder')} class="whitespace-pre-wrap">
{asset.originalPath}
</a>
@@ -398,12 +394,10 @@
{#if asset.exifInfo?.make || asset.exifInfo?.model}
<p>
<a
href={resolve(
`${AppRoute.SEARCH}?${getMetadataSearchQuery({
...(asset.exifInfo?.make ? { make: asset.exifInfo.make } : {}),
...(asset.exifInfo?.model ? { model: asset.exifInfo.model } : {}),
})}`,
)}
href="{AppRoute.SEARCH}?{getMetadataSearchQuery({
...(asset.exifInfo?.make ? { make: asset.exifInfo.make } : {}),
...(asset.exifInfo?.model ? { model: asset.exifInfo.model } : {}),
})}"
title="{$t('search_for')} {asset.exifInfo.make || ''} {asset.exifInfo.model || ''}"
class="hover:text-primary"
>
@@ -417,9 +411,7 @@
<div class="flex gap-2 text-sm">
<p>
<a
href={resolve(
`${AppRoute.SEARCH}?${getMetadataSearchQuery({ lensModel: asset.exifInfo.lensModel })}`,
)}
href="{AppRoute.SEARCH}?{getMetadataSearchQuery({ lensModel: asset.exifInfo.lensModel })}"
title="{$t('search_for')} {asset.exifInfo.lensModel}"
class="hover:text-primary line-clamp-1"
>
@@ -483,7 +475,7 @@
simplified
useLocationPin
showSimpleControls={!showEditFaces}
onOpenInMapView={() => goto(resolve(`${AppRoute.MAP}#12.5/${latlng.lat}/${latlng.lng}`))}
onOpenInMapView={() => goto(`${AppRoute.MAP}#12.5/${latlng.lat}/${latlng.lng}`)}
>
{#snippet popup({ marker })}
{@const { lat, lon } = marker}
@@ -524,7 +516,7 @@
<section class="px-6 pt-6 dark:text-immich-dark-fg">
<p class="uppercase pb-4 text-sm">{$t('appears_in')}</p>
{#each albums as album (album.id)}
<a href={resolve(`${AppRoute.ALBUMS}/${album.id}`)}>
<a href="{AppRoute.ALBUMS}/{album.id}">
<div class="flex gap-4 pt-2 hover:cursor-pointer items-center">
<div>
<img

View File

@@ -97,15 +97,12 @@
}
try {
const result = await copyImageToClipboard($photoViewerImgElement ?? assetFileUrl);
if (result.success) {
notificationController.show({ type: NotificationType.Info, message: $t('copied_image_to_clipboard') });
} else {
notificationController.show({
type: NotificationType.Error,
message: $t('errors.clipboard_unsupported_mime_type', { values: { mimeType: result.mimeType } }),
});
}
await copyImageToClipboard($photoViewerImgElement ?? assetFileUrl);
notificationController.show({
type: NotificationType.Info,
message: $t('copied_image_to_clipboard'),
timeout: 3000,
});
} catch (error) {
handleError(error, $t('copy_error'));
}

View File

@@ -7,7 +7,6 @@
<script lang="ts">
import DateInput from '$lib/elements/DateInput.svelte';
import { Text } from '@immich/ui';
import { t } from 'svelte-i18n';
interface Props {
@@ -15,27 +14,31 @@
}
let { filters = $bindable() }: Props = $props();
let invalid = $derived(filters.takenAfter && filters.takenBefore && filters.takenAfter > filters.takenBefore);
const inputClasses = $derived(
`immich-form-input w-full mt-1 hover:cursor-pointer ${invalid ? 'border border-danger' : ''}`,
);
</script>
<div class="flex flex-col gap-1">
<div id="date-range-selection" class="grid grid-auto-fit-40 gap-5">
<label class="immich-form-label" for="start-date">
<span class="uppercase">{$t('start_date')}</span>
<DateInput class={inputClasses} type="date" id="start-date" name="start-date" bind:value={filters.takenAfter} />
</label>
<div id="date-range-selection" class="grid grid-auto-fit-40 gap-5">
<label class="immich-form-label" for="start-date">
<span class="uppercase">{$t('start_date')}</span>
<DateInput
class="immich-form-input w-full mt-1 hover:cursor-pointer"
type="date"
id="start-date"
name="start-date"
max={filters.takenBefore}
bind:value={filters.takenAfter}
/>
</label>
<label class="immich-form-label" for="end-date">
<span class="uppercase">{$t('end_date')}</span>
<DateInput class={inputClasses} type="date" id="end-date" name="end-date" bind:value={filters.takenBefore} />
</label>
</div>
{#if invalid}
<Text color="danger">{$t('start_date_before_end_date')}</Text>
{/if}
<label class="immich-form-label" for="end-date">
<span class="uppercase">{$t('end_date')}</span>
<DateInput
class="immich-form-input w-full mt-1 hover:cursor-pointer"
type="date"
id="end-date"
name="end-date"
placeholder=""
min={filters.takenAfter}
bind:value={filters.takenBefore}
/>
</label>
</div>
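One side of this hunk derives an `invalid` flag instead of constraining the inputs with `min`/`max`. Assuming ISO `YYYY-MM-DD` strings, as a date input binds, lexicographic order matches chronological order, so the check is a plain string comparison:

```ts
// Hedged sketch of the derived check; property names mirror the filters above.
type DateFilters = { takenAfter?: string; takenBefore?: string };

const isInvalidRange = ({ takenAfter, takenBefore }: DateFilters) =>
  Boolean(takenAfter && takenBefore && takenAfter > takenBefore);

console.assert(isInvalidRange({ takenAfter: '2025-09-20', takenBefore: '2025-09-19' }) === true);
console.assert(isInvalidRange({ takenAfter: '2025-09-18' }) === false); // open-ended ranges are valid
```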

View File

@@ -105,7 +105,7 @@
{/if}
{#if inputType !== SettingInputFieldType.PASSWORD}
<div class="flex place-items-center place-content-center gap-2">
<div class="flex place-items-center place-content-center">
{#if inputType === SettingInputFieldType.COLOR}
<input
bind:this={input}

View File

@@ -17,9 +17,6 @@ describe('RecentAlbums component', () => {
render(RecentAlbums);
expect(sdkMock.getAllAlbums).toBeCalledTimes(1);
// wtf
await tick();
await tick();
const links = screen.getAllByRole('link');

View File

@@ -625,21 +625,7 @@ const urlToBlob = async (imageSource: string) => {
return await response.blob();
};
export const copyImageToClipboard = async (
source: HTMLImageElement | string,
): Promise<{ success: true } | { success: false; mimeType: string }> => {
if (source instanceof HTMLImageElement) {
// do not await, so the Safari clipboard write happens in the context of the user gesture
await navigator.clipboard.write([new ClipboardItem({ ['image/png']: imgToBlob(source) })]);
return { success: true };
}
// if we had a way to get the mime type synchronously, we could do the same thing here
const blob = await urlToBlob(source);
if (!ClipboardItem.supports(blob.type)) {
return { success: false, mimeType: blob.type };
}
export const copyImageToClipboard = async (source: HTMLImageElement | string) => {
const blob = source instanceof HTMLImageElement ? await imgToBlob(source) : await urlToBlob(source);
await navigator.clipboard.write([new ClipboardItem({ [blob.type]: blob })]);
return { success: true };
};
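The inline comment in the longer variant above captures the key constraint: Safari only permits the clipboard write while still inside the user gesture, so the pending blob promise is handed to `ClipboardItem` rather than awaited first. A hedged standalone sketch, with `imgToBlob` assumed to rasterize the `<img>` to a PNG blob via a canvas:

```ts
// Gesture-safe clipboard write: ClipboardItem accepts Promise<Blob> values.
const copyPng = (img: HTMLImageElement, imgToBlob: (el: HTMLImageElement) => Promise<Blob>) =>
  // Pass the pending promise instead of awaiting it, so clipboard.write
  // itself still runs synchronously within the user gesture.
  navigator.clipboard.write([new ClipboardItem({ 'image/png': imgToBlob(img) })]);
```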