Compare commits

33 Commits

Author SHA1 Message Date
Jason Rasmussen
fd0a12df37 feat: context menus 2025-11-10 09:27:21 -05:00
Mert
6913697ad1 feat(ml): multilingual ocr (#23527)
* handle other languages in ml server

* add variants to model selector

* no need to override path

* unused import
2025-11-06 12:58:41 -05:00
Mert
a4ae86ce29 feat(ml): add preload and fp16 settings for ocr (#23576) 2025-11-06 17:55:11 +00:00
Snowknight26
2c50f2e244 fix(web): add URLs to results in large files utility (#23617)
fix(web): add URLs to results in large files
2025-11-06 09:24:47 -05:00
shenlong
365abd8906 fix: check if unmetered instead of wifi (#23380)
Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2025-11-05 13:27:38 -06:00
Alex
25fb43bbe3 fix: fully sync local library on app restart (#23323) 2025-11-05 14:09:50 +00:00
bo0tzz
125e8cee01 chore: update config.json example (#23471)
* chore: update config.json example

closes #23465

* fix: format, for real this time
2025-11-05 08:05:53 -06:00
Arnaud Wery
c15e9bfa72 fix(web): "select all" button in trash and permanently deleted count (#23594) 2025-11-05 14:05:48 +00:00
Dmitry
35e188e6e7 docs: sync ru docs with main README.md (#23627) 2025-11-05 08:05:03 -06:00
Sergey Katsubo
3cc9dd126c fix(web): fix timezone dropdown for timestamps lacking milliseconds (#23615)
Fix timezone selector for timestamps without milliseconds
2025-11-05 08:03:55 -06:00
Jason Rasmussen
aa69d89b9f fix: bad merge (#23610) 2025-11-04 16:22:45 -05:00
Jason Rasmussen
29c14a3f58 refactor: database column names (#23356) 2025-11-04 16:03:21 -05:00
Jason Rasmussen
0df70365d7 feat: exif medium tests (#23561) 2025-11-04 16:03:02 -05:00
Mees Frensel
c34be73d81 fix(web): consistently use mdiMotionPauseOutline icon (#23595) 2025-11-04 12:12:47 +01:00
renovate[bot]
f396e9e374 chore(deps): update prom/prometheus docker digest to 4921475 (#23578)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-04 11:49:12 +01:00
renovate[bot]
821a9d4691 chore(deps): update redis:6.2-alpine docker digest to 37e0024 (#23579)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-04 11:48:21 +01:00
renovate[bot]
cad654586f chore(deps): update dependency @types/node to ^22.18.13 (#23581)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-04 11:47:54 +01:00
github-actions
28eb1bc13c chore: version v2.2.3 2025-11-04 03:14:34 +00:00
Brandon Wees
1e4779cf48 fix(mobile): ignore patch releases for app version alerts (#23565)
* fix(mobile): ignore patch releases for app version alerts

* chore: make difference type nullable to indicate when versions match

* chore: add error handling for semver parsing

* chore: tests
2025-11-03 21:09:32 -06:00
Sergey Katsubo
0647c22956 fix(mobile): handle empty original filename (#23469)
* Handle empty original filename

* Handle TypeError from photo_manager titleAsync

* More compact exception log
2025-11-03 21:09:18 -06:00
Alex
b8087b4fa2 chore: ios prod build with correct argument, get version number from pubspec (#23554)
* chore: ios prod build with correct argument, get version number from pubspec

* Update mobile/ios/fastlane/Fastfile

Co-authored-by: bo0tzz <git@bo0tzz.me>

---------

Co-authored-by: bo0tzz <git@bo0tzz.me>
2025-11-03 10:11:11 -06:00
Jonathan S
d94cb9641b chore: correct hosted isar paths in fdroid_build_isar.sh (#23529)
This should hopefully unblock F-Droid builds, which are a few versions behind.

Based on the suggestion in https://github.com/immich-app/immich/pull/22757#issuecomment-3404516987
2025-11-03 08:35:56 -06:00
Daniel Dietzler
517c3e1d4c fix: exif gps parsing of malformed data (#23551)
* fix: exif gps parsing of malformed data

* chore: e2e test
2025-11-03 09:02:41 -05:00
Ben
619de2a5e4 fix(web): search bar accessibility (#23550)
* fix: always show search type when search bar is focused

* fix: indicate search type to screen reader users
2025-11-03 08:31:57 -05:00
Mert
79d0e3e1ed fix(ml): ocr inputs not resized correctly (#23541)
* fix resizing, use pillow

* unused import

* linting

* lanczos

* optimizations

fused operations

unused import
2025-11-03 07:21:30 +00:00
github-actions
f5ff36a1f8 chore: version v2.2.2 2025-11-02 21:56:36 +00:00
Alex
b5efc9c16e fix: passing secrets to trigger workflow (#23447)
* fix: passing secrets to trigger workflow

* pass secrets to workflow call
2025-11-02 15:54:35 -06:00
Alex
1036076b0d fix: disable prunning for more investigation (#23531) 2025-11-02 15:54:03 -06:00
Daniel Dietzler
c76324c611 fix(web): mobile scrubber on page load (#23488) 2025-11-01 22:15:33 -05:00
bo0tzz
0ddb92e1ec fix: use pnpm directly for fix-format (#23483) 2025-11-01 15:38:18 -04:00
Alex
d08a520aa2 chore: post release tasks (#23443) 2025-11-01 01:21:39 -05:00
dotlambda
7bdf0f6c50 chore(ml): remove setuptools from dependencies (#23446) 2025-10-31 21:34:10 +00:00
shenlong
2b33a58448 fix: show in timeline from search page (#23440)
Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2025-10-31 14:55:28 -05:00
131 changed files with 1845 additions and 1867 deletions

View File

@@ -20,6 +20,30 @@ on:
required: true
ANDROID_STORE_PASSWORD:
required: true
APP_STORE_CONNECT_API_KEY_ID:
required: true
APP_STORE_CONNECT_API_KEY_ISSUER_ID:
required: true
APP_STORE_CONNECT_API_KEY:
required: true
IOS_CERTIFICATE_P12:
required: true
IOS_CERTIFICATE_PASSWORD:
required: true
IOS_PROVISIONING_PROFILE:
required: true
IOS_PROVISIONING_PROFILE_SHARE_EXTENSION:
required: true
IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION:
required: true
IOS_DEVELOPMENT_PROVISIONING_PROFILE:
required: true
IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION:
required: true
IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION:
required: true
FASTLANE_TEAM_ID:
required: true
pull_request:
push:
branches: [main]

View File

@@ -39,7 +39,7 @@ jobs:
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Fix formatting
run: make install-all && make format-all
run: pnpm --recursive install && pnpm run --recursive --parallel fix:format
- name: Commit and push
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4

View File

@@ -99,6 +99,20 @@ jobs:
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
# iOS secrets
APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}
with:
ref: ${{ needs.bump_version.outputs.ref }}
environment: production

View File

@@ -382,6 +382,7 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
submodules: 'recursive'
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.99",
"version": "2.2.101",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -20,7 +20,7 @@
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.18.12",
"@types/node": "^22.18.13",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",

View File

@@ -83,7 +83,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:23031bfe0e74a13004252caaa74eccd0d62b6c6e7a04711d5b8bf5b7e113adc7
image: prom/prometheus@sha256:49214755b6153f90a597adcbff0252cc61069f8ab69ce8411285cd4a560e8038
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus

View File

@@ -106,14 +106,14 @@ SELECT "user"."email", "asset"."type", COUNT(*) FROM "asset"
```sql title="Count by tag"
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
```
```sql title="Count by tag (per user)"
SELECT "t"."value" AS "tag_name", "u"."email" as "user_email", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value", "u"."email" ORDER BY "number_assets" DESC;
```
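This diff renames the junction-table columns from `tagsId`/`assetsId` to `tagId`/`assetId`. For convenience, a minimal sketch of running the corrected per-tag count from Python with psycopg 3; the connection string is a placeholder and assumes direct access to the Immich database:

```python
import psycopg  # psycopg 3

# Query copied from the updated docs above (new tagId/assetId column names).
SQL = """
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
"""

# Connection parameters are placeholders; adjust to your deployment.
with psycopg.connect("host=localhost dbname=immich user=postgres") as conn:
    with conn.cursor() as cur:
        cur.execute(SQL)
        for tag_name, number_assets in cur.fetchall():
            print(f"{tag_name}: {number_assets}")
```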

View File

@@ -16,48 +16,76 @@ The default configuration looks like this:
```json
{
"ffmpeg": {
"crf": 23,
"threads": 0,
"preset": "ultrafast",
"targetVideoCodec": "h264",
"acceptedVideoCodecs": ["h264"],
"targetAudioCodec": "aac",
"acceptedAudioCodecs": ["aac", "mp3", "libopus", "pcm_s16le"],
"acceptedContainers": ["mov", "ogg", "webm"],
"targetResolution": "720",
"maxBitrate": "0",
"bframes": -1,
"refs": 0,
"gopSize": 0,
"temporalAQ": false,
"cqMode": "auto",
"twoPass": false,
"preferredHwDevice": "auto",
"transcode": "required",
"tonemap": "hable",
"accel": "disabled",
"accelDecode": false
},
"backup": {
"database": {
"enabled": true,
"cronExpression": "0 02 * * *",
"enabled": true,
"keepLastAmount": 14
}
},
"ffmpeg": {
"accel": "disabled",
"accelDecode": false,
"acceptedAudioCodecs": ["aac", "mp3", "libopus"],
"acceptedContainers": ["mov", "ogg", "webm"],
"acceptedVideoCodecs": ["h264"],
"bframes": -1,
"cqMode": "auto",
"crf": 23,
"gopSize": 0,
"maxBitrate": "0",
"preferredHwDevice": "auto",
"preset": "ultrafast",
"refs": 0,
"targetAudioCodec": "aac",
"targetResolution": "720",
"targetVideoCodec": "h264",
"temporalAQ": false,
"threads": 0,
"tonemap": "hable",
"transcode": "required",
"twoPass": false
},
"image": {
"colorspace": "p3",
"extractEmbedded": false,
"fullsize": {
"enabled": false,
"format": "jpeg",
"quality": 80
},
"preview": {
"format": "jpeg",
"quality": 80,
"size": 1440
},
"thumbnail": {
"format": "webp",
"quality": 80,
"size": 250
}
},
"job": {
"backgroundTask": {
"concurrency": 5
},
"smartSearch": {
"faceDetection": {
"concurrency": 2
},
"library": {
"concurrency": 5
},
"metadataExtraction": {
"concurrency": 5
},
"faceDetection": {
"concurrency": 2
"migration": {
"concurrency": 5
},
"notifications": {
"concurrency": 5
},
"ocr": {
"concurrency": 1
},
"search": {
"concurrency": 5
@@ -65,20 +93,23 @@ The default configuration looks like this:
"sidecar": {
"concurrency": 5
},
"library": {
"concurrency": 5
},
"migration": {
"concurrency": 5
"smartSearch": {
"concurrency": 2
},
"thumbnailGeneration": {
"concurrency": 3
},
"videoConversion": {
"concurrency": 1
}
},
"library": {
"scan": {
"cronExpression": "0 0 * * *",
"enabled": true
},
"notifications": {
"concurrency": 5
"watch": {
"enabled": false
}
},
"logging": {
@@ -86,8 +117,11 @@ The default configuration looks like this:
"level": "log"
},
"machineLearning": {
"enabled": true,
"urls": ["http://immich-machine-learning:3003"],
"availabilityChecks": {
"enabled": true,
"interval": 30000,
"timeout": 2000
},
"clip": {
"enabled": true,
"modelName": "ViT-B-32__openai"
@@ -96,27 +130,59 @@ The default configuration looks like this:
"enabled": true,
"maxDistance": 0.01
},
"enabled": true,
"facialRecognition": {
"enabled": true,
"modelName": "buffalo_l",
"minScore": 0.7,
"maxDistance": 0.5,
"minFaces": 3
}
"minFaces": 3,
"minScore": 0.7,
"modelName": "buffalo_l"
},
"ocr": {
"enabled": true,
"maxResolution": 736,
"minDetectionScore": 0.5,
"minRecognitionScore": 0.8,
"modelName": "PP-OCRv5_mobile"
},
"urls": ["http://immich-machine-learning:3003"]
},
"map": {
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json",
"enabled": true,
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json",
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json"
},
"reverseGeocoding": {
"enabled": true
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json"
},
"metadata": {
"faces": {
"import": false
}
},
"newVersionCheck": {
"enabled": true
},
"nightlyTasks": {
"clusterNewFaces": true,
"databaseCleanup": true,
"generateMemories": true,
"missingThumbnails": true,
"startTime": "00:00",
"syncQuotaUsage": true
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"host": "",
"ignoreCert": false,
"password": "",
"port": 587,
"secure": false,
"username": ""
}
}
},
"oauth": {
"autoLaunch": false,
"autoRegister": true,
@@ -128,70 +194,44 @@ The default configuration looks like this:
"issuerUrl": "",
"mobileOverrideEnabled": false,
"mobileRedirectUri": "",
"profileSigningAlgorithm": "none",
"roleClaim": "immich_role",
"scope": "openid email profile",
"signingAlgorithm": "RS256",
"profileSigningAlgorithm": "none",
"storageLabelClaim": "preferred_username",
"storageQuotaClaim": "immich_quota"
"storageQuotaClaim": "immich_quota",
"timeout": 30000,
"tokenEndpointAuthMethod": "client_secret_post"
},
"passwordLogin": {
"enabled": true
},
"reverseGeocoding": {
"enabled": true
},
"server": {
"externalDomain": "",
"loginPageMessage": "",
"publicUsers": true
},
"storageTemplate": {
"enabled": false,
"hashVerificationEnabled": true,
"template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
},
"image": {
"thumbnail": {
"format": "webp",
"size": 250,
"quality": 80
},
"preview": {
"format": "jpeg",
"size": 1440,
"quality": 80
},
"colorspace": "p3",
"extractEmbedded": false
},
"newVersionCheck": {
"enabled": true
},
"trash": {
"enabled": true,
"days": 30
"templates": {
"email": {
"albumInviteTemplate": "",
"albumUpdateTemplate": "",
"welcomeTemplate": ""
}
},
"theme": {
"customCss": ""
},
"library": {
"scan": {
"enabled": true,
"cronExpression": "0 0 * * *"
},
"watch": {
"enabled": false
}
},
"server": {
"externalDomain": "",
"loginPageMessage": ""
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"ignoreCert": false,
"host": "",
"port": 587,
"username": "",
"password": ""
}
}
"trash": {
"days": 30,
"enabled": true
},
"user": {
"deleteDelay": 7

View File

@@ -149,29 +149,31 @@ Redis (Sentinel) URL example JSON before encoding:
## Machine Learning
| Variable | Description | Default | Containers |
| :---------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spinned up while inferencing. | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
| Variable | Description | Default | Containers |
| :---------------------------------------------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__OCR` | Set the maximum number of boxes that will be processed at once by the OCR model | `6` | machine learning |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spun up while inferencing. | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
| `MACHINE_LEARNING_OPENVINO_PRECISION` | If set to FP16, uses half-precision floating-point operations for faster inference with reduced accuracy (one of [`FP16`, `FP32`], applies only to OpenVINO) | `FP32` | machine learning |
\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
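The double-underscore names such as `MACHINE_LEARNING_PRELOAD__OCR__DETECTION` follow a nested-settings convention: prefix, then one path segment per `__`. A standalone sketch of that mapping using pydantic-settings (v2 API) — this is illustrative only, not the app's actual settings class:

```python
import os

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class OcrPreload(BaseModel):
    detection: str | None = None
    recognition: str | None = None


class Preload(BaseModel):
    ocr: OcrPreload = OcrPreload()


class MlSettings(BaseSettings):
    # Strip the prefix, then split the remainder on "__" into nested fields.
    model_config = SettingsConfigDict(env_prefix="MACHINE_LEARNING_", env_nested_delimiter="__")
    preload: Preload = Preload()


os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
print(MlSettings().preload.ocr.detection)  # -> PP-OCRv5_mobile
```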

View File

@@ -1,4 +1,12 @@
[
{
"label": "v2.2.3",
"url": "https://docs.v2.2.3.archive.immich.app"
},
{
"label": "v2.2.2",
"url": "https://docs.v2.2.2.archive.immich.app"
},
{
"label": "v2.2.1",
"url": "https://docs.v2.2.1.archive.immich.app"

View File

@@ -35,7 +35,7 @@ services:
- 2285:2285
redis:
image: redis:6.2-alpine@sha256:77697a75da9f94e9357b61fcaf8345f69e3d9d32e9d15032c8415c21263977dc
image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb
database:
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338

View File

@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "2.2.1",
"version": "2.2.3",
"description": "",
"main": "index.js",
"type": "module",
@@ -25,7 +25,7 @@
"@playwright/test": "^1.44.1",
"@socket.io/component-emitter": "^3.1.2",
"@types/luxon": "^3.4.2",
"@types/node": "^22.18.12",
"@types/node": "^22.18.13",
"@types/oidc-provider": "^9.0.0",
"@types/pg": "^8.15.1",
"@types/pngjs": "^6.0.4",

View File

@@ -15,7 +15,6 @@ import { DateTime } from 'luxon';
import { randomBytes } from 'node:crypto';
import { readFile, writeFile } from 'node:fs/promises';
import { basename, join } from 'node:path';
import sharp from 'sharp';
import { Socket } from 'socket.io-client';
import { createUserDto, uuidDto } from 'src/fixtures';
import { makeRandomImage } from 'src/generators';
@@ -41,40 +40,6 @@ const today = DateTime.fromObject({
}) as DateTime<true>;
const yesterday = today.minus({ days: 1 });
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
// Generate unique color to ensure different checksums for each image
const r = Math.floor(Math.random() * 256);
const g = Math.floor(Math.random() * 256);
const b = Math.floor(Math.random() * 256);
// Create a 100x100 solid color JPEG using Sharp
const imageBytes = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r, g, b },
},
})
.jpeg({ quality: 90 })
.toBuffer();
// Add random suffix to filename to avoid collisions
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
const filepath = join(tempDir, uniqueFilename);
await writeFile(filepath, imageBytes);
// Filter out undefined values before writing EXIF
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
await exiftool.write(filepath, cleanExifData);
// Re-read the image bytes after EXIF has been written
const finalImageBytes = await readFile(filepath);
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
};
describe('/asset', () => {
let admin: LoginResponseDto;
let websocket: Socket;
@@ -1249,411 +1214,6 @@ describe('/asset', () => {
});
});
describe('EXIF metadata extraction', () => {
describe('Additional date tag extraction', () => {
describe('Date-time vs time-only tag handling', () => {
it('should fall back to file timestamps when only time-only tags are available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
// Exclude all date-time tags to force fallback to file timestamps
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
GPSDateTime: undefined,
DateTimeUTC: undefined,
SonyDateTime2: undefined,
GPSDateStamp: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
it('should prefer DateTimeOriginal over time-only tags', async () => {
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use DateTimeOriginal, not TimeCreated
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-10-10T10:00:00.000Z').getTime(),
);
});
});
describe('GPSDateTime tag extraction', () => {
it('should extract GPSDateTime with GPS coordinates', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
GPSDateTime: '2023:11:15 12:30:00Z',
GPSLatitude: 37.7749,
GPSLongitude: -122.4194,
// Exclude other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-11-15T12:30:00.000Z').getTime(),
);
});
});
describe('CreateDate tag extraction', () => {
it('should extract CreateDate when available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
CreateDate: '2023:11:15 10:30:00',
// Exclude other higher priority date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
GPSDateTime: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-11-15T10:30:00.000Z').getTime(),
);
});
});
describe('GPSDateStamp tag extraction', () => {
it('should fall back to file timestamps when only date-only tags are available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
GPSLatitude: 51.5074,
GPSLongitude: -0.1278,
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
DateTimeOriginal: undefined,
CreateDate: undefined,
CreationDate: undefined,
GPSDateTime: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
});
/*
* NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
*
* NOT WRITABLE to JPEG:
* - MediaCreateDate: Can be read from video files but not written to JPEG
* - DateTimeCreated: Read-only tag in JPEG format
* - DateTimeUTC: Cannot be written to JPEG files
* - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
* - SubSecMediaCreateDate: Tag not defined for JPEG format
* - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
*
* WRITABLE but NOT READABLE from JPEG:
* - SubSecDateTimeOriginal: Can be written but not read back from JPEG
* - SubSecCreateDate: Can be written but not read back from JPEG
*
* EFFECTIVELY TESTABLE TAGS (writable and readable):
* - DateTimeOriginal ✓
* - CreateDate ✓
* - CreationDate ✓
* - GPSDateTime ✓
*
* The metadata service correctly handles non-readable tags and will fall back to
* file timestamps when only non-readable tags are present.
*/
describe('Date tag priority order', () => {
it('should respect the complete date tag priority order', async () => {
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
const testCases = [
{
name: 'DateTimeOriginal has highest priority among testable tags',
exifData: {
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
CreateDate: '2023:05:05 05:00:00', // TESTABLE
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-04-04T04:00:00.000Z',
},
{
name: 'CreationDate when DateTimeOriginal missing',
exifData: {
CreationDate: '2023:05:05 05:00:00', // TESTABLE
CreateDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-05-05T05:00:00.000Z',
},
{
name: 'CreationDate when standard EXIF tags missing',
exifData: {
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-07-07T07:00:00.000Z',
},
{
name: 'GPSDateTime when no other testable date tags present',
exifData: {
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
Make: 'SONY',
},
expectedDate: '2023-10-10T10:00:00.000Z',
},
];
for (const testCase of testCases) {
const { imageBytes, filename } = await createTestImageWithExif(
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
testCase.exifData,
);
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
expect(
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
`Date mismatch for: ${testCase.name}`,
).toBe(new Date(testCase.expectedDate).getTime());
}
});
});
describe('Edge cases for date tag handling', () => {
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
// Intentionally no GPSTimeStamp
// Exclude all other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
GPSDateTime: undefined,
DateTimeUTC: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
it('should handle all testable date tags present to verify complete priority order', async () => {
const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
// All TESTABLE date tags to JPEG format (writable AND readable)
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
CreateDate: '2023:05:05 05:00:00', // TESTABLE
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
// Note: Excluded non-testable tags:
// SubSec tags: writable but not readable from JPEG
// Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
// Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use DateTimeOriginal as it has the highest priority among testable tags
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-04-04T04:00:00.000Z').getTime(),
);
});
it('should use CreationDate when SubSec tags are missing', async () => {
const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
CreationDate: '2023:07:07 07:00:00', // WRITABLE
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
// Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
// Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
// Exclude SubSec and standard EXIF tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
CreateDate: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use CreationDate when available
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-07-07T07:00:00.000Z').getTime(),
);
});
it('should skip invalid date formats and use next valid tag', async () => {
const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
// Note: Testing invalid date handling with only WRITABLE tags
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
CreationDate: '2023:13:13 13:00:00', // WRITABLE - Valid date
// Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
// Exclude other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
CreateDate: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should skip invalid dates and use the first valid one (GPSDateTime)
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-10-10T10:00:00.000Z').getTime(),
);
});
});
});
});
describe('POST /assets/exist', () => {
it('ignores invalid deviceAssetIds', async () => {
const response = await utils.checkExistingAssets(user1.accessToken, {

View File

@@ -1,178 +0,0 @@
#!/usr/bin/env node
/**
* Script to generate test images with additional EXIF date tags
* This creates actual JPEG images with embedded metadata for testing
* Images are generated into e2e/test-assets/metadata/dates/
*/
import { execSync } from 'node:child_process';
import { writeFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import sharp from 'sharp';
interface TestImage {
filename: string;
description: string;
exifTags: Record<string, string>;
}
const testImages: TestImage[] = [
{
filename: 'time-created.jpg',
description: 'Image with TimeCreated tag',
exifTags: {
TimeCreated: '2023:11:15 14:30:00',
Make: 'Canon',
Model: 'EOS R5',
},
},
{
filename: 'gps-datetime.jpg',
description: 'Image with GPSDateTime and coordinates',
exifTags: {
GPSDateTime: '2023:11:15 12:30:00Z',
GPSLatitude: '37.7749',
GPSLongitude: '-122.4194',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
{
filename: 'datetime-utc.jpg',
description: 'Image with DateTimeUTC tag',
exifTags: {
DateTimeUTC: '2023:11:15 10:30:00',
Make: 'Nikon',
Model: 'D850',
},
},
{
filename: 'gps-datestamp.jpg',
description: 'Image with GPSDateStamp and GPSTimeStamp',
exifTags: {
GPSDateStamp: '2023:11:15',
GPSTimeStamp: '08:30:00',
GPSLatitude: '51.5074',
GPSLongitude: '-0.1278',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
{
filename: 'sony-datetime2.jpg',
description: 'Sony camera image with SonyDateTime2 tag',
exifTags: {
SonyDateTime2: '2023:11:15 06:30:00',
Make: 'SONY',
Model: 'ILCE-7RM5',
},
},
{
filename: 'date-priority-test.jpg',
description: 'Image with multiple date tags to test priority',
exifTags: {
SubSecDateTimeOriginal: '2023:01:01 01:00:00',
DateTimeOriginal: '2023:02:02 02:00:00',
SubSecCreateDate: '2023:03:03 03:00:00',
CreateDate: '2023:04:04 04:00:00',
CreationDate: '2023:05:05 05:00:00',
DateTimeCreated: '2023:06:06 06:00:00',
TimeCreated: '2023:07:07 07:00:00',
GPSDateTime: '2023:08:08 08:00:00',
DateTimeUTC: '2023:09:09 09:00:00',
GPSDateStamp: '2023:10:10',
SonyDateTime2: '2023:11:11 11:00:00',
},
},
{
filename: 'new-tags-only.jpg',
description: 'Image with only additional date tags (no standard tags)',
exifTags: {
TimeCreated: '2023:12:01 15:45:30',
GPSDateTime: '2023:12:01 13:45:30Z',
DateTimeUTC: '2023:12:01 13:45:30',
GPSDateStamp: '2023:12:01',
SonyDateTime2: '2023:12:01 08:45:30',
GPSLatitude: '40.7128',
GPSLongitude: '-74.0060',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
];
const generateTestImages = async (): Promise<void> => {
// Target directory: e2e/test-assets/metadata/dates/
// Current file is in: e2e/src/
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');
console.log('Generating test images with additional EXIF date tags...');
console.log(`Target directory: ${targetDir}`);
for (const image of testImages) {
try {
const imagePath = join(targetDir, image.filename);
// Create unique JPEG file using Sharp
const r = Math.floor(Math.random() * 256);
const g = Math.floor(Math.random() * 256);
const b = Math.floor(Math.random() * 256);
const jpegData = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r, g, b },
},
})
.jpeg({ quality: 90 })
.toBuffer();
writeFileSync(imagePath, jpegData);
// Build exiftool command to add EXIF data
const exifArgs = Object.entries(image.exifTags)
.map(([tag, value]) => `-${tag}="${value}"`)
.join(' ');
const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;
console.log(`Creating ${image.filename}: ${image.description}`);
execSync(command, { stdio: 'pipe' });
// Verify the tags were written
const verifyCommand = `exiftool -json "${imagePath}"`;
const result = execSync(verifyCommand, { encoding: 'utf8' });
const metadata = JSON.parse(result)[0];
console.log(` ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);
// Log first date tag found for verification
const firstDateTag = Object.keys(image.exifTags).find(
(tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
);
if (firstDateTag && metadata[firstDateTag]) {
console.log(` ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
}
} catch (error) {
console.error(`Failed to create ${image.filename}:`, (error as Error).message);
}
}
console.log('\nTest image generation complete!');
console.log('Files created in:', targetDir);
console.log('\nTo test these images:');
console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
};
export { generateTestImages };
// Run the generator if this file is executed directly
if (import.meta.url === `file://${process.argv[1]}`) {
generateTestImages().catch(console.error);
}

View File

@@ -13,6 +13,8 @@ from rich.logging import RichHandler
from uvicorn import Server
from uvicorn.workers import UvicornWorker
from .schemas import ModelPrecision
class ClipSettings(BaseModel):
textual: str | None = None
@@ -24,6 +26,11 @@ class FacialRecognitionSettings(BaseModel):
detection: str | None = None
class OcrSettings(BaseModel):
recognition: str | None = None
detection: str | None = None
class PreloadModelData(BaseModel):
clip_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__CLIP", None)
facial_recognition_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION", None)
@@ -37,6 +44,7 @@ class PreloadModelData(BaseModel):
del os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION"]
clip: ClipSettings = ClipSettings()
facial_recognition: FacialRecognitionSettings = FacialRecognitionSettings()
ocr: OcrSettings = OcrSettings()
class MaxBatchSize(BaseModel):
@@ -70,6 +78,7 @@ class Settings(BaseSettings):
rknn_threads: int = 1
preload: PreloadModelData | None = None
max_batch_size: MaxBatchSize | None = None
openvino_precision: ModelPrecision = ModelPrecision.FP32
@property
def device_id(self) -> str:

View File

@@ -103,6 +103,20 @@ async def preload_models(preload: PreloadModelData) -> None:
ModelTask.FACIAL_RECOGNITION,
)
if preload.ocr.detection is not None:
await load_models(
preload.ocr.detection,
ModelType.DETECTION,
ModelTask.OCR,
)
if preload.ocr.recognition is not None:
await load_models(
preload.ocr.recognition,
ModelType.RECOGNITION,
ModelTask.OCR,
)
if preload.clip_fallback is not None:
log.warning(
"Deprecated env variable: 'MACHINE_LEARNING_PRELOAD__CLIP'. "

View File

@@ -78,6 +78,14 @@ _INSIGHTFACE_MODELS = {
_PADDLE_MODELS = {
"PP-OCRv5_server",
"PP-OCRv5_mobile",
"CH__PP-OCRv5_server",
"CH__PP-OCRv5_mobile",
"EL__PP-OCRv5_mobile",
"EN__PP-OCRv5_mobile",
"ESLAV__PP-OCRv5_mobile",
"KOREAN__PP-OCRv5_mobile",
"LATIN__PP-OCRv5_mobile",
"TH__PP-OCRv5_mobile",
}
SUPPORTED_PROVIDERS = [
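The new variant names encode an optional language prefix before a double underscore: the detector strips it (`model_name.split("__")[-1]`) and the recognizer maps it to a `LangRec` value (`model_name.split("__")[0]`), as the diffs below show. A tiny illustrative parser of that convention (the helper name is hypothetical):

```python
# Optional language prefix, double underscore, then the base PP-OCR model name.
def parse_ocr_model_name(model_name: str) -> tuple[str | None, str]:
    prefix, sep, base = model_name.partition("__")
    return (prefix, base) if sep else (None, model_name)

print(parse_ocr_model_name("KOREAN__PP-OCRv5_mobile"))  # ('KOREAN', 'PP-OCRv5_mobile')
print(parse_ocr_model_name("PP-OCRv5_mobile"))          # (None, 'PP-OCRv5_mobile')
```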

View File

@@ -1,8 +1,10 @@
from typing import Any
import cv2
import numpy as np
from numpy.typing import NDArray
from PIL import Image
from rapidocr.ch_ppocr_det import TextDetector as RapidTextDetector
from rapidocr.ch_ppocr_det.utils import DBPostProcess
from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType
@@ -10,11 +12,10 @@ from rapidocr.utils.typings import ModelType as RapidModelType
from immich_ml.config import log
from immich_ml.models.base import InferenceModel
from immich_ml.models.transforms import decode_cv2
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
from immich_ml.sessions.ort import OrtSession
from .schemas import OcrOptions, TextDetectionOutput
from .schemas import TextDetectionOutput
class TextDetector(InferenceModel):
@@ -22,15 +23,22 @@ class TextDetector(InferenceModel):
identity = (ModelType.DETECTION, ModelTask.OCR)
def __init__(self, model_name: str, **model_kwargs: Any) -> None:
super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)
super().__init__(model_name.split("__")[-1], **model_kwargs, model_format=ModelFormat.ONNX)
self.max_resolution = 736
self.min_score = 0.5
self.score_mode = "fast"
self.mean = np.array([0.5, 0.5, 0.5], dtype=np.float32)
self.std_inv = np.float32(1.0) / (np.array([0.5, 0.5, 0.5], dtype=np.float32) * 255.0)
self._empty: TextDetectionOutput = {
"image": np.empty(0, dtype=np.float32),
"boxes": np.empty(0, dtype=np.float32),
"scores": np.empty(0, dtype=np.float32),
}
self.postprocess = DBPostProcess(
thresh=0.3,
box_thresh=model_kwargs.get("minScore", 0.5),
max_candidates=1000,
unclip_ratio=1.6,
use_dilation=True,
score_mode="fast",
)
def _download(self) -> None:
model_info = InferSession.get_model_url(
@@ -52,35 +60,65 @@ class TextDetector(InferenceModel):
def _load(self) -> ModelSession:
# TODO: support other runtime sessions
session = OrtSession(self.model_path)
self.model = RapidTextDetector(
OcrOptions(
session=session.session,
limit_side_len=self.max_resolution,
limit_type="min",
box_thresh=self.min_score,
score_mode=self.score_mode,
)
)
return session
return OrtSession(self.model_path)
def _predict(self, inputs: bytes | Image.Image) -> TextDetectionOutput:
results = self.model(decode_cv2(inputs))
if results.boxes is None or results.scores is None or results.img is None:
# partly adapted from RapidOCR
def _predict(self, inputs: Image.Image) -> TextDetectionOutput:
w, h = inputs.size
if w < 32 or h < 32:
return self._empty
out = self.session.run(None, {"x": self._transform(inputs)})[0]
boxes, scores = self.postprocess(out, (h, w))
if len(boxes) == 0:
return self._empty
return {
"image": results.img,
"boxes": np.array(results.boxes, dtype=np.float32),
"scores": np.array(results.scores, dtype=np.float32),
"boxes": self.sorted_boxes(boxes),
"scores": np.array(scores, dtype=np.float32),
}
# adapted from RapidOCR
def _transform(self, img: Image.Image) -> NDArray[np.float32]:
if img.height < img.width:
ratio = float(self.max_resolution) / img.height
else:
ratio = float(self.max_resolution) / img.width
resize_h = int(img.height * ratio)
resize_w = int(img.width * ratio)
resize_h = int(round(resize_h / 32) * 32)
resize_w = int(round(resize_w / 32) * 32)
resized_img = img.resize((int(resize_w), int(resize_h)), resample=Image.Resampling.LANCZOS)
img_np: NDArray[np.float32] = cv2.cvtColor(np.array(resized_img, dtype=np.float32), cv2.COLOR_RGB2BGR) # type: ignore
img_np -= self.mean
img_np *= self.std_inv
img_np = np.transpose(img_np, (2, 0, 1))
return np.expand_dims(img_np, axis=0)
def sorted_boxes(self, dt_boxes: NDArray[np.float32]) -> NDArray[np.float32]:
if len(dt_boxes) == 0:
return dt_boxes
# Sort by y, then identify lines, then sort by (line, x)
y_order = np.argsort(dt_boxes[:, 0, 1], kind="stable")
sorted_y = dt_boxes[y_order, 0, 1]
line_ids = np.empty(len(dt_boxes), dtype=np.int32)
line_ids[0] = 0
np.cumsum(np.abs(np.diff(sorted_y)) >= 10, out=line_ids[1:])
# Create composite sort key for final ordering
# Shift line_ids by large factor, add x for tie-breaking
sort_key = line_ids[y_order] * 1e6 + dt_boxes[y_order, 0, 0]
final_order = np.argsort(sort_key, kind="stable")
sorted_boxes: NDArray[np.float32] = dt_boxes[y_order[final_order]]
return sorted_boxes
def configure(self, **kwargs: Any) -> None:
if (max_resolution := kwargs.get("maxResolution")) is not None:
self.max_resolution = max_resolution
self.model.limit_side_len = max_resolution
if (min_score := kwargs.get("minScore")) is not None:
self.min_score = min_score
self.model.postprocess_op.box_thresh = min_score
self.postprocess.box_thresh = min_score
if (score_mode := kwargs.get("scoreMode")) is not None:
self.score_mode = score_mode
self.model.postprocess_op.score_mode = score_mode
self.postprocess.score_mode = score_mode
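The `sorted_boxes` helper introduced above orders detections into reading order by first grouping boxes into visual lines, then sorting within each line by x. A standalone sketch of the cumsum line-grouping trick it uses, with made-up y values:

```python
import numpy as np

# After sorting top edges by y, a new "line" starts wherever consecutive y
# values differ by 10px or more; the cumsum of that boolean mask yields
# per-box line ids, exactly as in sorted_boxes above.
top_y = np.array([8.0, 12.0, 60.0], dtype=np.float32)  # already y-sorted
line_ids = np.empty(len(top_y), dtype=np.int32)
line_ids[0] = 0
np.cumsum(np.abs(np.diff(top_y)) >= 10, out=line_ids[1:])
print(line_ids)  # [0 0 1] -> first two boxes share a line, third starts a new one
```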

View File

@@ -1,9 +1,8 @@
from typing import Any
import cv2
import numpy as np
from numpy.typing import NDArray
from PIL.Image import Image
from PIL import Image
from rapidocr.ch_ppocr_rec import TextRecInput
from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer
from rapidocr.inference_engine.base import FileInfo, InferSession
@@ -14,6 +13,7 @@ from rapidocr.utils.vis_res import VisRes
from immich_ml.config import log, settings
from immich_ml.models.base import InferenceModel
from immich_ml.models.transforms import pil_to_cv2
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
from immich_ml.sessions.ort import OrtSession
@@ -25,6 +25,7 @@ class TextRecognizer(InferenceModel):
identity = (ModelType.RECOGNITION, ModelTask.OCR)
def __init__(self, model_name: str, **model_kwargs: Any) -> None:
self.language = LangRec[model_name.split("__")[0]] if "__" in model_name else LangRec.CH
self.min_score = model_kwargs.get("minScore", 0.9)
self._empty: TextRecognitionOutput = {
"box": np.empty(0, dtype=np.float32),
@@ -41,7 +42,7 @@ class TextRecognizer(InferenceModel):
engine_type=EngineType.ONNXRUNTIME,
ocr_version=OCRVersion.PPOCRV5,
task_type=TaskType.REC,
lang_type=LangRec.CH,
lang_type=self.language,
model_type=RapidModelType.MOBILE if "mobile" in self.model_name else RapidModelType.SERVER,
)
)
@@ -61,21 +62,21 @@ class TextRecognizer(InferenceModel):
session=session.session,
rec_batch_num=settings.max_batch_size.text_recognition if settings.max_batch_size is not None else 6,
rec_img_shape=(3, 48, 320),
lang_type=self.language,
)
)
return session
def _predict(self, _: Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
boxes, img, box_scores = texts["boxes"], texts["image"], texts["scores"]
def _predict(self, img: Image.Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
boxes, box_scores = texts["boxes"], texts["scores"]
if boxes.shape[0] == 0:
return self._empty
rec = self.model(TextRecInput(img=self.get_crop_img_list(img, boxes)))
if rec.txts is None:
return self._empty
height, width = img.shape[0:2]
boxes[:, :, 0] /= width
boxes[:, :, 1] /= height
boxes[:, :, 0] /= img.width
boxes[:, :, 1] /= img.height
text_scores = np.array(rec.scores)
valid_text_score_idx = text_scores > self.min_score
@@ -87,7 +88,7 @@ class TextRecognizer(InferenceModel):
"textScore": text_scores[valid_text_score_idx],
}
def get_crop_img_list(self, img: NDArray[np.float32], boxes: NDArray[np.float32]) -> list[NDArray[np.float32]]:
def get_crop_img_list(self, img: Image.Image, boxes: NDArray[np.float32]) -> list[NDArray[np.uint8]]:
img_crop_width = np.maximum(
np.linalg.norm(boxes[:, 1] - boxes[:, 0], axis=1), np.linalg.norm(boxes[:, 2] - boxes[:, 3], axis=1)
).astype(np.int32)
@@ -98,22 +99,55 @@ class TextRecognizer(InferenceModel):
pts_std[:, 1:3, 0] = img_crop_width[:, None]
pts_std[:, 2:4, 1] = img_crop_height[:, None]
img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1).tolist()
imgs: list[NDArray[np.float32]] = []
for box, pts_std, dst_size in zip(list(boxes), list(pts_std), img_crop_sizes):
M = cv2.getPerspectiveTransform(box, pts_std)
dst_img: NDArray[np.float32] = cv2.warpPerspective(
img,
M,
dst_size,
borderMode=cv2.BORDER_REPLICATE,
flags=cv2.INTER_CUBIC,
) # type: ignore
dst_height, dst_width = dst_img.shape[0:2]
img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1)
all_coeffs = self._get_perspective_transform(pts_std, boxes)
imgs: list[NDArray[np.uint8]] = []
for coeffs, dst_size in zip(all_coeffs, img_crop_sizes):
dst_img = img.transform(
size=tuple(dst_size),
method=Image.Transform.PERSPECTIVE,
data=tuple(coeffs),
resample=Image.Resampling.BICUBIC,
)
dst_width, dst_height = dst_img.size
if dst_height * 1.0 / dst_width >= 1.5:
dst_img = np.rot90(dst_img)
imgs.append(dst_img)
dst_img = dst_img.rotate(90, expand=True)
imgs.append(pil_to_cv2(dst_img))
return imgs
def _get_perspective_transform(self, src: NDArray[np.float32], dst: NDArray[np.float32]) -> NDArray[np.float32]:
N = src.shape[0]
x, y = src[:, :, 0], src[:, :, 1]
u, v = dst[:, :, 0], dst[:, :, 1]
A = np.zeros((N, 8, 9), dtype=np.float32)
# Fill even rows (0, 2, 4, 6): [x, y, 1, 0, 0, 0, -u*x, -u*y, -u]
A[:, ::2, 0] = x
A[:, ::2, 1] = y
A[:, ::2, 2] = 1
A[:, ::2, 6] = -u * x
A[:, ::2, 7] = -u * y
A[:, ::2, 8] = -u
# Fill odd rows (1, 3, 5, 7): [0, 0, 0, x, y, 1, -v*x, -v*y, -v]
A[:, 1::2, 3] = x
A[:, 1::2, 4] = y
A[:, 1::2, 5] = 1
A[:, 1::2, 6] = -v * x
A[:, 1::2, 7] = -v * y
A[:, 1::2, 8] = -v
# Solve using SVD for all matrices at once
_, _, Vt = np.linalg.svd(A)
H = Vt[:, -1, :].reshape(N, 3, 3)
H = H / H[:, 2:3, 2:3]
# Extract the 8 coefficients for each transformation
return np.column_stack(
[H[:, 0, 0], H[:, 0, 1], H[:, 0, 2], H[:, 1, 0], H[:, 1, 1], H[:, 1, 2], H[:, 2, 0], H[:, 2, 1]]
) # pyright: ignore[reportReturnType]
def configure(self, **kwargs: Any) -> None:
self.min_score = kwargs.get("minScore", self.min_score)
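The `_get_perspective_transform` method above batches, via SVD, the same 8-coefficient homography solve that `cv2.getPerspectiveTransform` performs one box at a time; with `src` as the upright crop rectangle and `dst` as the detected quad, the resulting coefficients feed PIL's `Image.Transform.PERSPECTIVE` directly. A sanity-check sketch under the assumption that opencv-python is available (corner values are made up):

```python
import cv2
import numpy as np

rect = np.array([[0, 0], [100, 0], [100, 48], [0, 48]], dtype=np.float32)  # crop rectangle
box = np.array([[10, 5], [90, 12], [95, 60], [8, 55]], dtype=np.float32)   # detected quad

# cv2 solves the same system with h33 fixed to 1; for a non-degenerate quad
# the first 8 entries should match the batched SVD result up to precision.
M = cv2.getPerspectiveTransform(rect, box)  # maps rectangle -> source quad
print((M / M[2, 2]).flatten()[:8])
```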

View File

@@ -7,7 +7,6 @@ from typing_extensions import TypedDict
class TextDetectionOutput(TypedDict):
image: npt.NDArray[np.float32]
boxes: npt.NDArray[np.float32]
scores: npt.NDArray[np.float32]
@@ -21,8 +20,8 @@ class TextRecognitionOutput(TypedDict):
# RapidOCR expects `engine_type`, `lang_type`, and `font_path` to be attributes
class OcrOptions(dict[str, Any]):
def __init__(self, **options: Any) -> None:
def __init__(self, lang_type: LangRec | None = None, **options: Any) -> None:
super().__init__(**options)
self.engine_type = EngineType.ONNXRUNTIME
self.lang_type = LangRec.CH
self.lang_type = lang_type
self.font_path = None

View File

@@ -46,6 +46,11 @@ class ModelSource(StrEnum):
PADDLE = "paddle"
class ModelPrecision(StrEnum):
FP16 = "FP16"
FP32 = "FP32"
ModelIdentity = tuple[ModelType, ModelTask]

View File

@@ -93,10 +93,12 @@ class OrtSession:
case "CUDAExecutionProvider" | "ROCMExecutionProvider":
options = {"arena_extend_strategy": "kSameAsRequested", "device_id": settings.device_id}
case "OpenVINOExecutionProvider":
openvino_dir = self.model_path.parent / "openvino"
device = f"GPU.{settings.device_id}"
options = {
"device_type": f"GPU.{settings.device_id}",
"precision": "FP32",
"cache_dir": (self.model_path.parent / "openvino").as_posix(),
"device_type": device,
"precision": settings.openvino_precision.value,
"cache_dir": openvino_dir.as_posix(),
}
case "CoreMLExecutionProvider":
options = {

View File

@@ -1,6 +1,6 @@
[project]
name = "immich-ml"
version = "2.2.1"
version = "2.2.3"
description = ""
authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }]
requires-python = ">=3.10,<4.0"
@@ -22,7 +22,6 @@ dependencies = [
"rich>=13.4.2",
"tokenizers>=0.15.0,<1.0",
"uvicorn[standard]>=0.22.0,<1.0",
"setuptools>=78.1.0",
"rapidocr>=3.1.0",
]

View File

@@ -26,7 +26,7 @@ from immich_ml.models.clip.textual import MClipTextualEncoder, OpenClipTextualEn
from immich_ml.models.clip.visual import OpenClipVisualEncoder
from immich_ml.models.facial_recognition.detection import FaceDetector
from immich_ml.models.facial_recognition.recognition import FaceRecognizer
from immich_ml.schemas import ModelFormat, ModelTask, ModelType
from immich_ml.schemas import ModelFormat, ModelPrecision, ModelTask, ModelType
from immich_ml.sessions.ann import AnnSession
from immich_ml.sessions.ort import OrtSession
from immich_ml.sessions.rknn import RknnSession, run_inference
@@ -240,11 +240,16 @@ class TestOrtSession:
@pytest.mark.ov_device_ids(["GPU.0", "CPU"])
def test_sets_default_provider_options(self, ov_device_ids: list[str]) -> None:
model_path = "/cache/ViT-B-32__openai/model.onnx"
model_path = "/cache/ViT-B-32__openai/textual/model.onnx"
session = OrtSession(model_path, providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"])
assert session.provider_options == [
{"device_type": "GPU.0", "precision": "FP32", "cache_dir": "/cache/ViT-B-32__openai/openvino"},
{
"device_type": "GPU.0",
"precision": "FP32",
"cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
},
{"arena_extend_strategy": "kSameAsRequested"},
]
@@ -262,6 +267,21 @@ class TestOrtSession:
}
]
def test_sets_openvino_to_fp16_if_enabled(self, mocker: MockerFixture) -> None:
model_path = "/cache/ViT-B-32__openai/textual/model.onnx"
os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"
mocker.patch.object(settings, "openvino_precision", ModelPrecision.FP16)
session = OrtSession(model_path, providers=["OpenVINOExecutionProvider"])
assert session.provider_options == [
{
"device_type": "GPU.1",
"precision": "FP16",
"cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
}
]
def test_sets_provider_options_for_cuda(self) -> None:
os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"
@@ -417,7 +437,7 @@ class TestRknnSession:
session.run(None, input_feed)
rknn_session.return_value.put.assert_called_once_with([input1, input2])
np_spy.call_count == 2
assert np_spy.call_count == 2
np_spy.assert_has_calls([mock.call(input1), mock.call(input2)])
@@ -925,11 +945,34 @@ class TestCache:
any_order=True,
)
async def test_preloads_ocr_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"
settings = Settings()
assert settings.preload is not None
assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"
model_cache = ModelCache()
monkeypatch.setattr("immich_ml.main.model_cache", model_cache)
await preload_models(settings.preload)
mock_get_model.assert_has_calls(
[
mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
],
any_order=True,
)
async def test_preloads_all_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
os.environ["MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL"] = "ViT-B-32__openai"
os.environ["MACHINE_LEARNING_PRELOAD__CLIP__VISUAL"] = "ViT-B-32__openai"
os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION"] = "buffalo_s"
os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION"] = "buffalo_s"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"
settings = Settings()
assert settings.preload is not None
@@ -937,6 +980,8 @@ class TestCache:
assert settings.preload.clip.textual == "ViT-B-32__openai"
assert settings.preload.facial_recognition.recognition == "buffalo_s"
assert settings.preload.facial_recognition.detection == "buffalo_s"
assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"
model_cache = ModelCache()
monkeypatch.setattr("immich_ml.main.model_cache", model_cache)
@@ -948,6 +993,8 @@ class TestCache:
mock.call("ViT-B-32__openai", ModelType.VISUAL, ModelTask.SEARCH),
mock.call("buffalo_s", ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION),
mock.call("buffalo_s", ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION),
mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
],
any_order=True,
)
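The `MACHINE_LEARNING_PRELOAD__OCR__DETECTION` variables exercised above follow the pydantic-settings nested-delimiter convention, with a double underscore separating nesting levels. A minimal sketch of how that maps onto nested models (class and field names here are assumptions, not the project's actual Settings):

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict

class OcrPreload(BaseModel):
    detection: str | None = None
    recognition: str | None = None

class PreloadSettings(BaseModel):
    ocr: OcrPreload = OcrPreload()

class AppSettings(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="MACHINE_LEARNING_", env_nested_delimiter="__")
    preload: PreloadSettings | None = None

# With MACHINE_LEARNING_PRELOAD__OCR__DETECTION=PP-OCRv5_mobile in the
# environment, AppSettings().preload.ocr.detection == "PP-OCRv5_mobile".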

View File

@@ -1100,7 +1100,6 @@ dependencies = [
{ name = "python-multipart" },
{ name = "rapidocr" },
{ name = "rich" },
{ name = "setuptools" },
{ name = "tokenizers" },
{ name = "uvicorn", extra = ["standard"] },
]
@@ -1188,7 +1187,6 @@ requires-dist = [
{ name = "rapidocr", specifier = ">=3.1.0" },
{ name = "rich", specifier = ">=13.4.2" },
{ name = "rknn-toolkit-lite2", marker = "extra == 'rknn'", specifier = ">=2.3.0,<3" },
{ name = "setuptools", specifier = ">=78.1.0" },
{ name = "tokenizers", specifier = ">=0.15.0,<1.0" },
{ name = "uvicorn", extras = ["standard"], specifier = ">=0.22.0,<1.0" },
]

View File

@@ -88,7 +88,6 @@ if [ "$CURRENT_MOBILE" != "$NEXT_MOBILE" ]; then
fi
sed -i "s/\"android\.injected\.version\.name\" => \"$CURRENT_SERVER\",/\"android\.injected\.version\.name\" => \"$NEXT_SERVER\",/" mobile/android/fastlane/Fastfile
sed -i "s/version_number: \"$CURRENT_SERVER\"$/version_number: \"$NEXT_SERVER\"/" mobile/ios/fastlane/Fastfile
sed -i "s/\"android\.injected\.version\.code\" => $CURRENT_MOBILE,/\"android\.injected\.version\.code\" => $NEXT_MOBILE,/" mobile/android/fastlane/Fastfile
sed -i "s/^version: $CURRENT_SERVER+$CURRENT_MOBILE$/version: $NEXT_SERVER+$NEXT_MOBILE/" mobile/pubspec.yaml

View File

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 3024,
"android.injected.version.name" => "2.2.1",
"android.injected.version.code" => 3026,
"android.injected.version.name" => "2.2.3",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

View File

@@ -133,11 +133,15 @@
/* Begin PBXFileSystemSynchronizedRootGroup section */
B231F52D2E93A44A00BC45D1 /* Core */ = {
isa = PBXFileSystemSynchronizedRootGroup;
exceptions = (
);
path = Core;
sourceTree = "<group>";
};
B2CF7F8C2DDE4EBB00744BF6 /* Sync */ = {
isa = PBXFileSystemSynchronizedRootGroup;
exceptions = (
);
path = Sync;
sourceTree = "<group>";
};
@@ -526,14 +530,10 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Copy Pods Resources";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";
@@ -562,14 +562,10 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Embed Pods Frameworks";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
@@ -718,7 +714,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 231;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -862,7 +858,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 231;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -892,7 +888,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 231;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -926,7 +922,7 @@
CODE_SIGN_ENTITLEMENTS = WidgetExtension/WidgetExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 231;
CURRENT_PROJECT_VERSION = 233;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
@@ -969,7 +965,7 @@
CODE_SIGN_ENTITLEMENTS = WidgetExtension/WidgetExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 231;
CURRENT_PROJECT_VERSION = 233;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
@@ -1009,7 +1005,7 @@
CODE_SIGN_ENTITLEMENTS = WidgetExtension/WidgetExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 231;
CURRENT_PROJECT_VERSION = 233;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
@@ -1048,7 +1044,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 231;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -1092,7 +1088,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 231;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -1133,7 +1129,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 231;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;

View File

@@ -80,7 +80,7 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>2.1.0</string>
<string>2.2.1</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleURLTypes</key>
@@ -107,7 +107,7 @@
</dict>
</array>
<key>CFBundleVersion</key>
<string>231</string>
<string>233</string>
<key>FLTEnableImpeller</key>
<true/>
<key>ITSAppUsesNonExemptEncryption</key>

View File

@@ -32,6 +32,17 @@ platform :ios do
)
end
# Helper method to get version from pubspec.yaml
def get_version_from_pubspec
  require 'yaml'
  pubspec_path = File.join(Dir.pwd, "../..", "pubspec.yaml")
  pubspec = YAML.load_file(pubspec_path)
  version_string = pubspec['version']
  version_string ? version_string.split('+').first : nil
end
# Helper method to configure code signing for all targets
def configure_code_signing(bundle_id_suffix: "")
bundle_suffix = bundle_id_suffix.empty? ? "" : ".#{bundle_id_suffix}"
@@ -158,7 +169,8 @@ platform :ios do
# Build and upload with version number
build_and_upload(
api_key: api_key,
version_number: "2.1.0"
version_number: get_version_from_pubspec,
distribute_external: false,
)
end
@@ -168,8 +180,9 @@ platform :ios do
path: "./Runner.xcodeproj",
targets: ["Runner", "ShareExtension", "WidgetExtension"]
)
increment_version_number(
version_number: "2.2.1"
version_number: get_version_from_pubspec
)
increment_build_number(
build_number: latest_testflight_build_number + 1,

View File

@@ -15,18 +15,18 @@ For _fastlane_ installation instructions, see [Installing _fastlane_](https://do
## iOS
### ios release_dev
### ios gha_testflight_dev
```sh
[bundle exec] fastlane ios release_dev
[bundle exec] fastlane ios gha_testflight_dev
```
iOS Development Build to TestFlight (requires separate bundle ID)
### ios release_ci
### ios gha_release_prod
```sh
[bundle exec] fastlane ios release_ci
[bundle exec] fastlane ios gha_release_prod
```
iOS Release to TestFlight

View File

@@ -239,7 +239,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
final networkCapabilities = await _ref?.read(connectivityApiProvider).getCapabilities() ?? [];
return _ref
?.read(uploadServiceProvider)
.startBackupWithHttpClient(currentUser.id, networkCapabilities.hasWifi, _cancellationToken);
.startBackupWithHttpClient(currentUser.id, networkCapabilities.isUnmetered, _cancellationToken);
},
(error, stack) {
dPrint(() => "Error in backup zone $error, $stack");

View File

@@ -132,7 +132,8 @@ class SyncStreamService {
return;
// SyncCompleteV1 is used to signal the completion of the sync process. Cleanup stale assets and signal completion
case SyncEntityType.syncCompleteV1:
return _syncStreamRepository.pruneAssets();
return;
// return _syncStreamRepository.pruneAssets();
// Request to reset the client state. Clear everything related to remote entities
case SyncEntityType.syncResetV1:
return _syncStreamRepository.reset();

View File

@@ -65,7 +65,7 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
if (Store.isBetaTimelineEnabled) {
bool syncSuccess = false;
await Future.wait([
backgroundManager.syncLocal(),
backgroundManager.syncLocal(full: true),
backgroundManager.syncRemote().then((success) => syncSuccess = success),
]);

View File

@@ -14,7 +14,6 @@ import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/memory.provider.dart';
import 'package:immich_mobile/providers/infrastructure/people.provider.dart';
import 'package:immich_mobile/providers/infrastructure/readonly_mode.provider.dart';
import 'package:immich_mobile/providers/routes.provider.dart';
import 'package:immich_mobile/providers/search/search_input_focus.provider.dart';
import 'package:immich_mobile/providers/tab.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
@@ -108,8 +107,6 @@ class _TabShellPageState extends ConsumerState<TabShellPage> {
}
void _onNavigationSelected(TabsRouter router, int index, WidgetRef ref) {
ref.read(currentTabIndexProvider.notifier).state = index;
// On Photos page menu tapped
if (router.activeIndex == kPhotoTabIndex && index == kPhotoTabIndex) {
EventStream.shared.emit(const ScrollToTopEvent());

View File

@@ -73,27 +73,29 @@ class DriftSearchPage extends HookConsumerWidget {
);
}
search() async {
if (filter.value.isEmpty) {
searchFilter(SearchFilter filter) async {
if (filter.isEmpty) {
return;
}
if (preFilter == null && filter.value == previousFilter.value) {
if (preFilter == null && filter == previousFilter.value) {
return;
}
isSearching.value = true;
ref.watch(paginatedSearchProvider.notifier).clear();
final hasResult = await ref.watch(paginatedSearchProvider.notifier).search(filter.value);
final hasResult = await ref.watch(paginatedSearchProvider.notifier).search(filter);
if (!hasResult) {
context.showSnackBar(searchInfoSnackBar('search_no_result'.t(context: context)));
}
previousFilter.value = filter.value;
previousFilter.value = filter;
isSearching.value = false;
}
search() => searchFilter(filter.value);
loadMoreSearchResult() async {
isSearching.value = true;
final hasResult = await ref.watch(paginatedSearchProvider.notifier).search(filter.value);
@@ -108,7 +110,7 @@ class DriftSearchPage extends HookConsumerWidget {
searchPreFilter() {
if (preFilter != null) {
Future.delayed(Duration.zero, () {
search();
searchFilter(preFilter);
if (preFilter.location.city != null) {
locationCurrentFilterWidget.value = Text(preFilter.location.city!, style: context.textTheme.labelLarge);
@@ -122,7 +124,7 @@ class DriftSearchPage extends HookConsumerWidget {
searchPreFilter();
return null;
}, []);
}, [preFilter]);
showPeoplePicker() {
handleOnSelect(Set<PersonDto> value) {

View File

@@ -3,14 +3,12 @@ import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/models/search/search_filter.model.dart';
import 'package:immich_mobile/presentation/pages/search/paginated_search.provider.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/base_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
import 'package:immich_mobile/providers/routes.provider.dart';
import 'package:immich_mobile/routing/router.dart';
class SimilarPhotosActionButton extends ConsumerWidget {
@@ -38,20 +36,7 @@ class SimilarPhotosActionButton extends ConsumerWidget {
),
);
/// Using currentTabIndex to make sure we are using the correct
/// navigation behavior. We want to be able to navigate back to the
/// main timeline using the View In Timeline button without needing to
/// wait for the timeline to be rebuilt. At the same time, we want
/// to refresh the search page when tapping the Similar Photos button
/// while already in the Search tab.
final currentTabIndex = (ref.read(currentTabIndexProvider.notifier).state);
if (currentTabIndex != kSearchTabIndex) {
unawaited(context.router.navigate(const DriftSearchRoute()));
ref.read(currentTabIndexProvider.notifier).state = kSearchTabIndex;
} else {
unawaited(context.router.popAndPush(const DriftSearchRoute()));
}
unawaited(context.navigateTo(const DriftSearchRoute()));
}
@override

View File

@@ -5,4 +5,3 @@ final inLockedViewProvider = StateProvider<bool>((ref) => false);
final currentRouteNameProvider = StateProvider<String?>((ref) => null);
final previousRouteNameProvider = StateProvider<String?>((ref) => null);
final previousRouteDataProvider = StateProvider<RouteSettings?>((ref) => null);
final currentTabIndexProvider = StateProvider<int>((ref) => 0);

View File

@@ -67,7 +67,7 @@ class ServerInfoNotifier extends StateNotifier<ServerInfo> {
return;
}
if (clientVersion < serverVersion) {
if (clientVersion < serverVersion && clientVersion.differenceType(serverVersion) != SemVerType.patch) {
state = state.copyWith(versionStatus: VersionStatus.clientOutOfDate);
return;
}
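The rule above only raises the out-of-date alert when the client is behind by more than a patch release. Mirrored here as a hedged Python sketch, since the Dart `SemVer` class itself appears later in this diff:

from dataclasses import dataclass

@dataclass(order=True)
class SemVer:
    major: int
    minor: int
    patch: int

    def difference_type(self, other: "SemVer") -> str | None:
        if self.major != other.major:
            return "major"
        if self.minor != other.minor:
            return "minor"
        if self.patch != other.patch:
            return "patch"
        return None

client, server = SemVer(2, 2, 1), SemVer(2, 2, 3)
show_alert = client < server and client.difference_type(server) != "patch"
assert show_alert is False  # patch-only difference: suppress the alert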

View File

@@ -89,9 +89,16 @@ class AssetMediaRepository {
return null;
}
// titleAsync gets the correct original filename for some assets on iOS
// otherwise using the `entity.title` would return a random GUID
return await entity.titleAsync;
try {
// titleAsync gets the correct original filename for some assets on iOS
// otherwise using the `entity.title` would return a random GUID
final originalFilename = await entity.titleAsync;
// treat empty filename as missing
return originalFilename.isNotEmpty ? originalFilename : null;
} catch (e) {
_log.warning("Failed to get original filename for asset: $id. Error: $e");
return null;
}
}
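Expressed generically (a sketch only; `title_async` is a hypothetical stand-in for photo_manager's accessor), the guard collapses both empty titles and lookup failures into a missing filename:

import logging

log = logging.getLogger("asset_media")

async def original_filename(entity) -> str | None:
    try:
        title: str = await entity.title_async()  # hypothetical accessor
        return title or None  # an empty string is treated as missing
    except Exception as e:  # photo_manager's titleAsync may raise TypeError
        log.warning("Failed to get original filename: %s", e)
        return None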
// TODO: make this more efficient

View File

@@ -1,3 +1,5 @@
enum SemVerType { major, minor, patch }
class SemVer {
final int major;
final int minor;
@@ -15,8 +17,20 @@ class SemVer {
}
factory SemVer.fromString(String version) {
if (version.toLowerCase().startsWith("v")) {
version = version.substring(1);
}
final parts = version.split("-")[0].split('.');
return SemVer(major: int.parse(parts[0]), minor: int.parse(parts[1]), patch: int.parse(parts[2]));
if (parts.length != 3) {
throw FormatException('Invalid semantic version string: $version');
}
try {
return SemVer(major: int.parse(parts[0]), minor: int.parse(parts[1]), patch: int.parse(parts[2]));
} catch (e) {
throw FormatException('Invalid semantic version string: $version');
}
}
bool operator >(SemVer other) {
@@ -54,6 +68,20 @@ class SemVer {
return other is SemVer && other.major == major && other.minor == minor && other.patch == patch;
}
SemVerType? differenceType(SemVer other) {
if (major != other.major) {
return SemVerType.major;
}
if (minor != other.minor) {
return SemVerType.minor;
}
if (patch != other.patch) {
return SemVerType.patch;
}
return null;
}
@override
int get hashCode => major.hashCode ^ minor.hashCode ^ patch.hashCode;
}

View File

@@ -3,7 +3,7 @@ Immich API
This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
- API version: 2.2.1
- API version: 2.2.3
- Generator version: 7.8.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

View File

@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone
publish_to: 'none'
version: 2.2.1+3024
version: 2.2.3+3026
environment:
sdk: '>=3.8.0 <4.0.0'

View File

@@ -8,11 +8,11 @@ bash tool/build_android.sh x64
bash tool/build_android.sh armv7
bash tool/build_android.sh arm64
mv libisar_android_arm64.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
mv libisar_android_armv7.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
mv libisar_android_x64.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86_64/
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/x86_64/
mv libisar_android_x86.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86/
)
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/x86/
)

View File

@@ -0,0 +1,92 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:immich_mobile/utils/semver.dart';
void main() {
group('SemVer', () {
test('Parses valid semantic version strings correctly', () {
final version = SemVer.fromString('1.2.3');
expect(version.major, 1);
expect(version.minor, 2);
expect(version.patch, 3);
});
test('Throws FormatException for invalid version strings', () {
expect(() => SemVer.fromString('1.2'), throwsFormatException);
expect(() => SemVer.fromString('a.b.c'), throwsFormatException);
expect(() => SemVer.fromString('1.2.3.4'), throwsFormatException);
});
test('Compares equal versions correctly', () {
final v1 = SemVer.fromString('1.2.3');
final v2 = SemVer.fromString('1.2.3');
expect(v1 == v2, isTrue);
expect(v1 > v2, isFalse);
expect(v1 < v2, isFalse);
});
test('Compares major version correctly', () {
final v1 = SemVer.fromString('2.0.0');
final v2 = SemVer.fromString('1.9.9');
expect(v1 == v2, isFalse);
expect(v1 > v2, isTrue);
expect(v1 < v2, isFalse);
});
test('Compares minor version correctly', () {
final v1 = SemVer.fromString('1.3.0');
final v2 = SemVer.fromString('1.2.9');
expect(v1 == v2, isFalse);
expect(v1 > v2, isTrue);
expect(v1 < v2, isFalse);
});
test('Compares patch version correctly', () {
final v1 = SemVer.fromString('1.2.4');
final v2 = SemVer.fromString('1.2.3');
expect(v1 == v2, isFalse);
expect(v1 > v2, isTrue);
expect(v1 < v2, isFalse);
});
test('Gives correct major difference type', () {
final v1 = SemVer.fromString('2.0.0');
final v2 = SemVer.fromString('1.9.9');
expect(v1.differenceType(v2), SemVerType.major);
});
test('Gives correct minor difference type', () {
final v1 = SemVer.fromString('1.3.0');
final v2 = SemVer.fromString('1.2.9');
expect(v1.differenceType(v2), SemVerType.minor);
});
test('Gives correct patch difference type', () {
final v1 = SemVer.fromString('1.2.4');
final v2 = SemVer.fromString('1.2.3');
expect(v1.differenceType(v2), SemVerType.patch);
});
test('Gives null difference type for equal versions', () {
final v1 = SemVer.fromString('1.2.3');
final v2 = SemVer.fromString('1.2.3');
expect(v1.differenceType(v2), isNull);
});
test('toString returns correct format', () {
final version = SemVer.fromString('1.2.3');
expect(version.toString(), '1.2.3');
});
test('Parses versions with leading v correctly', () {
final version1 = SemVer.fromString('v1.2.3');
expect(version1.major, 1);
expect(version1.minor, 2);
expect(version1.patch, 3);
final version2 = SemVer.fromString('V1.2.3');
expect(version2.major, 1);
expect(version2.minor, 2);
expect(version2.patch, 3);
});
});
}

View File

@@ -10006,7 +10006,7 @@
"info": {
"title": "Immich",
"description": "Immich API",
"version": "2.2.1",
"version": "2.2.3",
"contact": {}
},
"tags": [],

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/sdk",
"version": "2.2.1",
"version": "2.2.3",
"description": "Auto-generated TypeScript SDK for the Immich API",
"type": "module",
"main": "./build/index.js",
@@ -19,7 +19,7 @@
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.18.12",
"@types/node": "^22.18.13",
"typescript": "^5.3.3"
},
"repository": {

View File

@@ -1,6 +1,6 @@
/**
* Immich
* 2.2.1
* 2.2.3
* DO NOT MODIFY - This file has been generated using oazapfts.
* See https://www.npmjs.com/package/oazapfts
*/

pnpm-lock.yaml generated
View File

@@ -63,7 +63,7 @@ importers:
specifier: ^4.13.1
version: 4.13.4
'@types/node':
specifier: ^22.18.12
specifier: ^22.18.13
version: 22.18.13
'@vitest/coverage-v8':
specifier: ^3.0.0
@@ -211,7 +211,7 @@ importers:
specifier: ^3.4.2
version: 3.7.1
'@types/node':
specifier: ^22.18.12
specifier: ^22.18.13
version: 22.18.13
'@types/oidc-provider':
specifier: ^9.0.0
@@ -293,7 +293,7 @@ importers:
version: 1.0.4
devDependencies:
'@types/node':
specifier: ^22.18.12
specifier: ^22.18.13
version: 22.18.13
typescript:
specifier: ^5.3.3
@@ -582,7 +582,7 @@ importers:
specifier: ^2.0.0
version: 2.0.0
'@types/node':
specifier: ^22.18.12
specifier: ^22.18.13
version: 22.18.13
'@types/nodemailer':
specifier: ^7.0.0
@@ -684,8 +684,8 @@ importers:
specifier: file:../open-api/typescript-sdk
version: link:../open-api/typescript-sdk
'@immich/ui':
specifier: ^0.40.2
version: 0.40.2(@internationalized/date@3.8.2)(svelte@5.41.3)
specifier: ^0.42.2
version: 0.42.2(@internationalized/date@3.8.2)(svelte@5.41.3)
'@mapbox/mapbox-gl-rtl-text':
specifier: 0.2.3
version: 0.2.3(mapbox-gl@1.13.3)
@@ -2776,13 +2776,13 @@ packages:
'@immich/justified-layout-wasm@0.4.3':
resolution: {integrity: sha512-fpcQ7zPhP3Cp1bEXhONVYSUeIANa2uzaQFGKufUZQo5FO7aFT77szTVChhlCy4XaVy5R4ZvgSkA/1TJmeORz7Q==}
'@immich/svelte-markdown-preprocess@0.0.1':
resolution: {integrity: sha512-1vWoT4LO6fEyxrKwLKiNFECEkRVbuvpYPDvA7LavObTt2ijnonPYBDgfTwCPTofjxcocIGYUayv3CzgOzFiMOA==}
'@immich/svelte-markdown-preprocess@0.1.0':
resolution: {integrity: sha512-jgSOJEGLPKEXQCNRI4r4YUayeM2b0ZYLdzgKGl891jZBhOQIetlY7rU44kPpV1AA3/8wGDwNFKduIQZZ/qJYzg==}
peerDependencies:
svelte: ^5.0.0
'@immich/ui@0.40.2':
resolution: {integrity: sha512-6NS4yVx0VoyH+AaM7TISDaoIzZe3RuDOi6xMkK2LrOPQbKwTuheD2iagxsRYzUtJ9IPrmCPrwRBc9Jq5BkvmBQ==}
'@immich/ui@0.42.2':
resolution: {integrity: sha512-4+Kwjqtf4tU6orW2l/1njHB2nRuw2PwEtLJyt2GYv8UBcp1Sxi0Qpwh5b0T0TIyZlDJatggY+yPl4cRQojI4oA==}
peerDependencies:
svelte: ^5.0.0
@@ -14343,13 +14343,13 @@ snapshots:
'@immich/justified-layout-wasm@0.4.3': {}
'@immich/svelte-markdown-preprocess@0.0.1(svelte@5.41.3)':
'@immich/svelte-markdown-preprocess@0.1.0(svelte@5.41.3)':
dependencies:
svelte: 5.41.3
'@immich/ui@0.40.2(@internationalized/date@3.8.2)(svelte@5.41.3)':
'@immich/ui@0.42.2(@internationalized/date@3.8.2)(svelte@5.41.3)':
dependencies:
'@immich/svelte-markdown-preprocess': 0.0.1(svelte@5.41.3)
'@immich/svelte-markdown-preprocess': 0.1.0(svelte@5.41.3)
'@mdi/js': 7.4.47
bits-ui: 2.9.8(@internationalized/date@3.8.2)(svelte@5.41.3)
luxon: 3.7.2

View File

@@ -94,7 +94,7 @@
| LivePhoto/MotionPhoto playback and backup | Yes | Yes |
| 360° image display | No | Yes |
| Configurable storage structure | Yes | Yes |
| Content sharing | No | Yes |
| Content sharing | Yes | Yes |
| Archive and favorites | Yes | Yes |
| World map | Yes | Yes |
| Partner sharing | Yes | Yes |
@@ -104,7 +104,7 @@
| View-only galleries | Yes | Yes |
| Collages | Yes | Yes |
| Labels (tags) | No | Yes |
| Folder view | No | Yes |
| Folder view | Yes | Yes |
## Translation

View File

@@ -1,6 +1,6 @@
{
"name": "immich",
"version": "2.2.1",
"version": "2.2.3",
"description": "",
"author": "",
"private": true,
@@ -129,7 +129,7 @@
"@types/luxon": "^3.6.2",
"@types/mock-fs": "^4.13.1",
"@types/multer": "^2.0.0",
"@types/node": "^22.18.12",
"@types/node": "^22.18.13",
"@types/nodemailer": "^7.0.0",
"@types/picomatch": "^4.0.0",
"@types/pngjs": "^6.0.5",

View File

@@ -356,7 +356,7 @@ export const columns = {
'asset.stackId',
'asset.libraryId',
],
syncAlbumUser: ['album_user.albumsId as albumId', 'album_user.usersId as userId', 'album_user.role'],
syncAlbumUser: ['album_user.albumId as albumId', 'album_user.userId as userId', 'album_user.role'],
syncStack: ['stack.id', 'stack.createdAt', 'stack.updatedAt', 'stack.primaryAssetId', 'stack.ownerId'],
syncUser: ['id', 'name', 'email', 'avatarColor', 'deletedAt', 'updateId', 'profileImagePath', 'profileChangedAt'],
stack: ['stack.id', 'stack.primaryAssetId', 'ownerId'],

View File

@@ -25,8 +25,8 @@ select
"album"."id"
from
"album"
left join "album_user" as "albumUsers" on "albumUsers"."albumsId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."usersId"
left join "album_user" as "albumUsers" on "albumUsers"."albumId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."userId"
and "user"."deletedAt" is null
where
"album"."id" in ($1)
@@ -52,8 +52,8 @@ select
"album"."id"
from
"album"
left join "album_user" on "album_user"."albumsId" = "album"."id"
left join "user" on "user"."id" = "album_user"."usersId"
left join "album_user" on "album_user"."albumId" = "album"."id"
left join "user" on "user"."id" = "album_user"."userId"
and "user"."deletedAt" is null
where
"album"."id" in ($1)
@@ -81,11 +81,11 @@ select
"asset"."livePhotoVideoId"
from
"album"
inner join "album_asset" as "albumAssets" on "album"."id" = "albumAssets"."albumsId"
inner join "asset" on "asset"."id" = "albumAssets"."assetsId"
inner join "album_asset" as "albumAssets" on "album"."id" = "albumAssets"."albumId"
inner join "asset" on "asset"."id" = "albumAssets"."assetId"
and "asset"."deletedAt" is null
left join "album_user" as "albumUsers" on "albumUsers"."albumsId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."usersId"
left join "album_user" as "albumUsers" on "albumUsers"."albumId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."userId"
and "user"."deletedAt" is null
cross join "target"
where
@@ -136,11 +136,11 @@ from
"shared_link"
left join "album" on "album"."id" = "shared_link"."albumId"
and "album"."deletedAt" is null
left join "shared_link_asset" on "shared_link_asset"."sharedLinksId" = "shared_link"."id"
left join "asset" on "asset"."id" = "shared_link_asset"."assetsId"
left join "shared_link_asset" on "shared_link_asset"."sharedLinkId" = "shared_link"."id"
left join "asset" on "asset"."id" = "shared_link_asset"."assetId"
and "asset"."deletedAt" is null
left join "album_asset" on "album_asset"."albumsId" = "album"."id"
left join "asset" as "albumAssets" on "albumAssets"."id" = "album_asset"."assetsId"
left join "album_asset" on "album_asset"."albumId" = "album"."id"
left join "asset" as "albumAssets" on "albumAssets"."id" = "album_asset"."assetId"
and "albumAssets"."deletedAt" is null
where
"shared_link"."id" = $1

View File

@@ -43,13 +43,13 @@ select
from
"user"
where
"user"."id" = "album_user"."usersId"
"user"."id" = "album_user"."userId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
) as agg
) as "albumUsers",
(
@@ -76,9 +76,9 @@ select
from
"asset"
left join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
inner join "album_asset" on "album_asset"."assetsId" = "asset"."id"
inner join "album_asset" on "album_asset"."assetId" = "asset"."id"
where
"album_asset"."albumsId" = "album"."id"
"album_asset"."albumId" = "album"."id"
and "asset"."deletedAt" is null
and "asset"."visibility" in ('archive', 'timeline')
order by
@@ -134,18 +134,18 @@ select
from
"user"
where
"user"."id" = "album_user"."usersId"
"user"."id" = "album_user"."userId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
) as agg
) as "albumUsers"
from
"album"
inner join "album_asset" on "album_asset"."albumsId" = "album"."id"
inner join "album_asset" on "album_asset"."albumId" = "album"."id"
where
(
"album"."ownerId" = $1
@@ -154,11 +154,11 @@ where
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
and "album_user"."usersId" = $2
"album_user"."albumId" = "album"."id"
and "album_user"."userId" = $2
)
)
and "album_asset"."assetsId" = $3
and "album_asset"."assetId" = $3
and "album"."deletedAt" is null
order by
"album"."createdAt" desc,
@@ -166,7 +166,7 @@ order by
-- AlbumRepository.getMetadataForIds
select
"album_asset"."albumsId" as "albumId",
"album_asset"."albumId" as "albumId",
min(
("asset"."localDateTime" AT TIME ZONE 'UTC'::text)::date
) as "startDate",
@@ -177,13 +177,13 @@ select
count("asset"."id")::int as "assetCount"
from
"asset"
inner join "album_asset" on "album_asset"."assetsId" = "asset"."id"
inner join "album_asset" on "album_asset"."assetId" = "asset"."id"
where
"asset"."visibility" in ('archive', 'timeline')
and "album_asset"."albumsId" in ($1)
and "album_asset"."albumId" in ($1)
and "asset"."deletedAt" is null
group by
"album_asset"."albumsId"
"album_asset"."albumId"
-- AlbumRepository.getOwned
select
@@ -228,13 +228,13 @@ select
from
"user"
where
"user"."id" = "album_user"."usersId"
"user"."id" = "album_user"."userId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
) as agg
) as "albumUsers",
(
@@ -283,13 +283,13 @@ select
from
"user"
where
"user"."id" = "album_user"."usersId"
"user"."id" = "album_user"."userId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
) as agg
) as "albumUsers",
(
@@ -332,10 +332,10 @@ where
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
and (
"album"."ownerId" = $1
or "album_user"."usersId" = $2
or "album_user"."userId" = $2
)
)
or exists (
@@ -382,7 +382,7 @@ where
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
)
and not exists (
select
@@ -397,7 +397,7 @@ order by
-- AlbumRepository.removeAssetsFromAll
delete from "album_asset"
where
"album_asset"."assetsId" in ($1)
"album_asset"."assetId" in ($1)
-- AlbumRepository.getAssetIds
select
@@ -405,8 +405,8 @@ select
from
"album_asset"
where
"album_asset"."albumsId" = $1
and "album_asset"."assetsId" in ($2)
"album_asset"."albumId" = $1
and "album_asset"."assetId" in ($2)
-- AlbumRepository.getContributorCounts
select
@@ -414,10 +414,10 @@ select
count(*) as "assetCount"
from
"album_asset"
inner join "asset" on "asset"."id" = "assetsId"
inner join "asset" on "asset"."id" = "assetId"
where
"asset"."deletedAt" is null
and "album_asset"."albumsId" = $1
and "album_asset"."albumId" = $1
group by
"asset"."ownerId"
order by
@@ -427,10 +427,10 @@ order by
insert into
"album_asset"
select
"album_asset"."albumsId",
$1 as "assetsId"
"album_asset"."albumId",
$1 as "assetId"
from
"album_asset"
where
"album_asset"."assetsId" = $2
"album_asset"."assetId" = $2
on conflict do nothing

View File

@@ -2,12 +2,12 @@
-- AlbumUserRepository.create
insert into
"album_user" ("usersId", "albumsId")
"album_user" ("userId", "albumId")
values
($1, $2)
returning
"usersId",
"albumsId",
"userId",
"albumId",
"role"
-- AlbumUserRepository.update
@@ -15,13 +15,13 @@ update "album_user"
set
"role" = $1
where
"usersId" = $2
and "albumsId" = $3
"userId" = $2
and "albumId" = $3
returning
*
-- AlbumUserRepository.delete
delete from "album_user"
where
"usersId" = $1
and "albumsId" = $2
"userId" = $1
and "albumId" = $2

View File

@@ -31,9 +31,9 @@ select
"tag"."value"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagsId"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
where
"asset"."id" = "tag_asset"."assetsId"
"asset"."id" = "tag_asset"."assetId"
) as agg
) as "tags"
from

View File

@@ -160,9 +160,9 @@ select
"tag"."parentId"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagsId"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
where
"asset"."id" = "tag_asset"."assetsId"
"asset"."id" = "tag_asset"."assetId"
) as agg
) as "tags",
to_json("asset_exif") as "exifInfo"

View File

@@ -23,8 +23,8 @@ where
from
"album_asset"
where
"asset"."id" = "album_asset"."assetsId"
and "album_asset"."albumsId" in ($3)
"asset"."id" = "album_asset"."assetId"
and "album_asset"."albumId" in ($3)
)
)
order by

View File

@@ -37,7 +37,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -66,7 +66,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -104,7 +104,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -137,7 +137,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -159,15 +159,15 @@ where
-- MemoryRepository.getAssetIds
select
"assetsId"
"assetId"
from
"memory_asset"
where
"memoriesId" = $1
and "assetsId" in ($2)
and "assetId" in ($2)
-- MemoryRepository.addAssetIds
insert into
"memory_asset" ("memoriesId", "assetsId")
"memory_asset" ("memoriesId", "assetId")
values
($1, $2)

View File

@@ -4,10 +4,10 @@
insert into
"shared_link_asset"
select
$1 as "assetsId",
"shared_link_asset"."sharedLinksId"
$1 as "assetId",
"shared_link_asset"."sharedLinkId"
from
"shared_link_asset"
where
"shared_link_asset"."assetsId" = $2
"shared_link_asset"."assetId" = $2
on conflict do nothing

View File

@@ -19,7 +19,7 @@ from
to_json("exifInfo") as "exifInfo"
from
"shared_link_asset"
inner join "asset" on "asset"."id" = "shared_link_asset"."assetsId"
inner join "asset" on "asset"."id" = "shared_link_asset"."assetId"
inner join lateral (
select
"asset_exif".*
@@ -29,7 +29,7 @@ from
"asset_exif"."assetId" = "asset"."id"
) as "exifInfo" on true
where
"shared_link"."id" = "shared_link_asset"."sharedLinksId"
"shared_link"."id" = "shared_link_asset"."sharedLinkId"
and "asset"."deletedAt" is null
order by
"asset"."fileCreatedAt" asc
@@ -51,7 +51,7 @@ from
to_json("owner") as "owner"
from
"album"
left join "album_asset" on "album_asset"."albumsId" = "album"."id"
left join "album_asset" on "album_asset"."albumId" = "album"."id"
left join lateral (
select
"asset".*,
@@ -67,7 +67,7 @@ from
"asset_exif"."assetId" = "asset"."id"
) as "exifInfo" on true
where
"album_asset"."assetsId" = "asset"."id"
"album_asset"."assetId" = "asset"."id"
and "asset"."deletedAt" is null
order by
"asset"."fileCreatedAt" asc
@@ -108,14 +108,14 @@ select distinct
to_json("album") as "album"
from
"shared_link"
left join "shared_link_asset" on "shared_link_asset"."sharedLinksId" = "shared_link"."id"
left join "shared_link_asset" on "shared_link_asset"."sharedLinkId" = "shared_link"."id"
left join lateral (
select
json_agg("asset") as "assets"
from
"asset"
where
"asset"."id" = "shared_link_asset"."assetsId"
"asset"."id" = "shared_link_asset"."assetId"
and "asset"."deletedAt" is null
) as "assets" on true
left join lateral (

View File

@@ -89,9 +89,9 @@ select
"tag"."parentId"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagsId"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
where
"tag_asset"."assetsId" = "asset"."id"
"tag_asset"."assetId" = "asset"."id"
) as agg
) as "tags",
to_json("exifInfo") as "exifInfo"

View File

@@ -2,12 +2,12 @@
-- SyncRepository.album.getCreatedAfter
select
"albumsId" as "id",
"albumId" as "id",
"createId"
from
"album_user"
where
"usersId" = $1
"userId" = $1
and "createId" >= $2
and "createId" < $3
order by
@@ -40,13 +40,13 @@ select distinct
"album"."updateId"
from
"album" as "album"
left join "album_user" as "album_users" on "album"."id" = "album_users"."albumsId"
left join "album_user" as "album_users" on "album"."id" = "album_users"."albumId"
where
"album"."updateId" < $1
and "album"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_users"."usersId" = $4
or "album_users"."userId" = $4
)
order by
"album"."updateId" asc
@@ -72,12 +72,12 @@ select
"album_asset"."updateId"
from
"album_asset" as "album_asset"
inner join "asset" on "asset"."id" = "album_asset"."assetsId"
inner join "asset" on "asset"."id" = "album_asset"."assetId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" <= $2
and "album_asset"."updateId" >= $3
and "album_asset"."albumsId" = $4
and "album_asset"."albumId" = $4
order by
"album_asset"."updateId" asc
@@ -102,16 +102,16 @@ select
"asset"."updateId"
from
"asset" as "asset"
inner join "album_asset" on "album_asset"."assetsId" = "asset"."id"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "album_asset" on "album_asset"."assetId" = "asset"."id"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"asset"."updateId" < $1
and "asset"."updateId" > $2
and "album_asset"."updateId" <= $3
and (
"album"."ownerId" = $4
or "album_user"."usersId" = $5
or "album_user"."userId" = $5
)
order by
"asset"."updateId" asc
@@ -137,15 +137,15 @@ select
"asset"."libraryId"
from
"album_asset" as "album_asset"
inner join "asset" on "asset"."id" = "album_asset"."assetsId"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "asset" on "asset"."id" = "album_asset"."assetId"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_user"."usersId" = $4
or "album_user"."userId" = $4
)
order by
"album_asset"."updateId" asc
@@ -180,12 +180,12 @@ select
"album_asset"."updateId"
from
"album_asset" as "album_asset"
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetsId"
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" <= $2
and "album_asset"."updateId" >= $3
and "album_asset"."albumsId" = $4
and "album_asset"."albumId" = $4
order by
"album_asset"."updateId" asc
@@ -219,16 +219,16 @@ select
"asset_exif"."updateId"
from
"asset_exif" as "asset_exif"
inner join "album_asset" on "album_asset"."assetsId" = "asset_exif"."assetId"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "album_asset" on "album_asset"."assetId" = "asset_exif"."assetId"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"asset_exif"."updateId" < $1
and "asset_exif"."updateId" > $2
and "album_asset"."updateId" <= $3
and (
"album"."ownerId" = $4
or "album_user"."usersId" = $5
or "album_user"."userId" = $5
)
order by
"asset_exif"."updateId" asc
@@ -263,23 +263,23 @@ select
"asset_exif"."fps"
from
"album_asset" as "album_asset"
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetsId"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetId"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_user"."usersId" = $4
or "album_user"."userId" = $4
)
order by
"album_asset"."updateId" asc
-- SyncRepository.albumToAsset.getBackfill
select
"album_asset"."assetsId" as "assetId",
"album_asset"."albumsId" as "albumId",
"album_asset"."assetId" as "assetId",
"album_asset"."albumId" as "albumId",
"album_asset"."updateId"
from
"album_asset" as "album_asset"
@@ -287,7 +287,7 @@ where
"album_asset"."updateId" < $1
and "album_asset"."updateId" <= $2
and "album_asset"."updateId" >= $3
and "album_asset"."albumsId" = $4
and "album_asset"."albumId" = $4
order by
"album_asset"."updateId" asc
@@ -311,11 +311,11 @@ where
union
(
select
"album_user"."albumsId" as "id"
"album_user"."albumId" as "id"
from
"album_user"
where
"album_user"."usersId" = $4
"album_user"."userId" = $4
)
)
order by
@@ -323,27 +323,27 @@ order by
-- SyncRepository.albumToAsset.getUpserts
select
"album_asset"."assetsId" as "assetId",
"album_asset"."albumsId" as "albumId",
"album_asset"."assetId" as "assetId",
"album_asset"."albumId" as "albumId",
"album_asset"."updateId"
from
"album_asset" as "album_asset"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_user"."usersId" = $4
or "album_user"."userId" = $4
)
order by
"album_asset"."updateId" asc
-- SyncRepository.albumUser.getBackfill
select
"album_user"."albumsId" as "albumId",
"album_user"."usersId" as "userId",
"album_user"."albumId" as "albumId",
"album_user"."userId" as "userId",
"album_user"."role",
"album_user"."updateId"
from
@@ -352,7 +352,7 @@ where
"album_user"."updateId" < $1
and "album_user"."updateId" <= $2
and "album_user"."updateId" >= $3
and "albumsId" = $4
and "albumId" = $4
order by
"album_user"."updateId" asc
@@ -376,11 +376,11 @@ where
union
(
select
"album_user"."albumsId" as "id"
"album_user"."albumId" as "id"
from
"album_user"
where
"album_user"."usersId" = $4
"album_user"."userId" = $4
)
)
order by
@@ -388,8 +388,8 @@ order by
-- SyncRepository.albumUser.getUpserts
select
"album_user"."albumsId" as "albumId",
"album_user"."usersId" as "userId",
"album_user"."albumId" as "albumId",
"album_user"."userId" as "userId",
"album_user"."role",
"album_user"."updateId"
from
@@ -397,7 +397,7 @@ from
where
"album_user"."updateId" < $1
and "album_user"."updateId" > $2
and "album_user"."albumsId" in (
and "album_user"."albumId" in (
select
"id"
from
@@ -407,11 +407,11 @@ where
union
(
select
"albumUsers"."albumsId" as "id"
"albumUsers"."albumId" as "id"
from
"album_user" as "albumUsers"
where
"albumUsers"."usersId" = $4
"albumUsers"."userId" = $4
)
)
order by
@@ -656,7 +656,7 @@ order by
-- SyncRepository.memoryToAsset.getUpserts
select
"memoriesId" as "memoryId",
"assetsId" as "assetId",
"assetId" as "assetId",
"updateId"
from
"memory_asset" as "memory_asset"

View File

@@ -84,19 +84,19 @@ where
-- TagRepository.addAssetIds
insert into
"tag_asset" ("tagsId", "assetsId")
"tag_asset" ("tagId", "assetId")
values
($1, $2)
-- TagRepository.removeAssetIds
delete from "tag_asset"
where
"tagsId" = $1
and "assetsId" in ($2)
"tagId" = $1
and "assetId" in ($2)
-- TagRepository.upsertAssetIds
insert into
"tag_asset" ("assetId", "tagsIds")
"tag_asset" ("assetId", "tagIds")
values
($1, $2)
on conflict do nothing
@@ -107,9 +107,9 @@ returning
begin
delete from "tag_asset"
where
"assetsId" = $1
"assetId" = $1
insert into
"tag_asset" ("tagsId", "assetsId")
"tag_asset" ("tagId", "assetId")
values
($1, $2)
on conflict do nothing

View File

@@ -52,8 +52,8 @@ class ActivityAccess {
return this.db
.selectFrom('album')
.select('album.id')
.leftJoin('album_user as albumUsers', 'albumUsers.albumsId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.usersId').on('user.deletedAt', 'is', null))
.leftJoin('album_user as albumUsers', 'albumUsers.albumId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.userId').on('user.deletedAt', 'is', null))
.where('album.id', 'in', [...albumIds])
.where('album.isActivityEnabled', '=', true)
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('user.id', '=', userId)]))
@@ -96,8 +96,8 @@ class AlbumAccess {
return this.db
.selectFrom('album')
.select('album.id')
.leftJoin('album_user', 'album_user.albumsId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'album_user.usersId').on('user.deletedAt', 'is', null))
.leftJoin('album_user', 'album_user.albumId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'album_user.userId').on('user.deletedAt', 'is', null))
.where('album.id', 'in', [...albumIds])
.where('album.deletedAt', 'is', null)
.where('user.id', '=', userId)
@@ -138,12 +138,12 @@ class AssetAccess {
return this.db
.with('target', (qb) => qb.selectNoFrom(sql`array[${sql.join([...assetIds])}]::uuid[]`.as('ids')))
.selectFrom('album')
.innerJoin('album_asset as albumAssets', 'album.id', 'albumAssets.albumsId')
.innerJoin('album_asset as albumAssets', 'album.id', 'albumAssets.albumId')
.innerJoin('asset', (join) =>
join.onRef('asset.id', '=', 'albumAssets.assetsId').on('asset.deletedAt', 'is', null),
join.onRef('asset.id', '=', 'albumAssets.assetId').on('asset.deletedAt', 'is', null),
)
.leftJoin('album_user as albumUsers', 'albumUsers.albumsId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.usersId').on('user.deletedAt', 'is', null))
.leftJoin('album_user as albumUsers', 'albumUsers.albumId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.userId').on('user.deletedAt', 'is', null))
.crossJoin('target')
.select(['asset.id', 'asset.livePhotoVideoId'])
.where((eb) =>
@@ -223,13 +223,13 @@ class AssetAccess {
return this.db
.selectFrom('shared_link')
.leftJoin('album', (join) => join.onRef('album.id', '=', 'shared_link.albumId').on('album.deletedAt', 'is', null))
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinksId', 'shared_link.id')
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinkId', 'shared_link.id')
.leftJoin('asset', (join) =>
join.onRef('asset.id', '=', 'shared_link_asset.assetsId').on('asset.deletedAt', 'is', null),
join.onRef('asset.id', '=', 'shared_link_asset.assetId').on('asset.deletedAt', 'is', null),
)
.leftJoin('album_asset', 'album_asset.albumsId', 'album.id')
.leftJoin('album_asset', 'album_asset.albumId', 'album.id')
.leftJoin('asset as albumAssets', (join) =>
join.onRef('albumAssets.id', '=', 'album_asset.assetsId').on('albumAssets.deletedAt', 'is', null),
join.onRef('albumAssets.id', '=', 'album_asset.assetId').on('albumAssets.deletedAt', 'is', null),
)
.select([
'asset.id as assetId',

View File

@@ -7,36 +7,36 @@ import { DB } from 'src/schema';
import { AlbumUserTable } from 'src/schema/tables/album-user.table';
export type AlbumPermissionId = {
albumsId: string;
usersId: string;
albumId: string;
userId: string;
};
@Injectable()
export class AlbumUserRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }] })
@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
create(albumUser: Insertable<AlbumUserTable>) {
return this.db
.insertInto('album_user')
.values(albumUser)
.returning(['usersId', 'albumsId', 'role'])
.returning(['userId', 'albumId', 'role'])
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }, { role: AlbumUserRole.Viewer }] })
update({ usersId, albumsId }: AlbumPermissionId, dto: Updateable<AlbumUserTable>) {
@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }, { role: AlbumUserRole.Viewer }] })
update({ userId, albumId }: AlbumPermissionId, dto: Updateable<AlbumUserTable>) {
return this.db
.updateTable('album_user')
.set(dto)
.where('usersId', '=', usersId)
.where('albumsId', '=', albumsId)
.where('userId', '=', userId)
.where('albumId', '=', albumId)
.returningAll()
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }] })
async delete({ usersId, albumsId }: AlbumPermissionId): Promise<void> {
await this.db.deleteFrom('album_user').where('usersId', '=', usersId).where('albumsId', '=', albumsId).execute();
@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
async delete({ userId, albumId }: AlbumPermissionId): Promise<void> {
await this.db.deleteFrom('album_user').where('userId', '=', userId).where('albumId', '=', albumId).execute();
}
}

View File

@@ -33,11 +33,11 @@ const withAlbumUsers = (eb: ExpressionBuilder<DB, 'album'>) => {
.selectFrom('album_user')
.select('album_user.role')
.select((eb) =>
jsonObjectFrom(eb.selectFrom('user').select(columns.user).whereRef('user.id', '=', 'album_user.usersId'))
jsonObjectFrom(eb.selectFrom('user').select(columns.user).whereRef('user.id', '=', 'album_user.userId'))
.$notNull()
.as('user'),
)
.whereRef('album_user.albumsId', '=', 'album.id'),
.whereRef('album_user.albumId', '=', 'album.id'),
)
.$notNull()
.as('albumUsers');
@@ -57,8 +57,8 @@ const withAssets = (eb: ExpressionBuilder<DB, 'album'>) => {
.selectAll('asset')
.leftJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.select((eb) => eb.table('asset_exif').$castTo<Exif>().as('exifInfo'))
.innerJoin('album_asset', 'album_asset.assetsId', 'asset.id')
.whereRef('album_asset.albumsId', '=', 'album.id')
.innerJoin('album_asset', 'album_asset.assetId', 'asset.id')
.whereRef('album_asset.albumId', '=', 'album.id')
.where('asset.deletedAt', 'is', null)
.$call(withDefaultVisibility)
.orderBy('asset.fileCreatedAt', 'desc')
@@ -92,19 +92,19 @@ export class AlbumRepository {
return this.db
.selectFrom('album')
.selectAll('album')
.innerJoin('album_asset', 'album_asset.albumsId', 'album.id')
.innerJoin('album_asset', 'album_asset.albumId', 'album.id')
.where((eb) =>
eb.or([
eb('album.ownerId', '=', ownerId),
eb.exists(
eb
.selectFrom('album_user')
.whereRef('album_user.albumsId', '=', 'album.id')
.where('album_user.usersId', '=', ownerId),
.whereRef('album_user.albumId', '=', 'album.id')
.where('album_user.userId', '=', ownerId),
),
]),
)
.where('album_asset.assetsId', '=', assetId)
.where('album_asset.assetId', '=', assetId)
.where('album.deletedAt', 'is', null)
.orderBy('album.createdAt', 'desc')
.select(withOwner)
@@ -125,16 +125,16 @@ export class AlbumRepository {
this.db
.selectFrom('asset')
.$call(withDefaultVisibility)
.innerJoin('album_asset', 'album_asset.assetsId', 'asset.id')
.select('album_asset.albumsId as albumId')
.innerJoin('album_asset', 'album_asset.assetId', 'asset.id')
.select('album_asset.albumId as albumId')
.select((eb) => eb.fn.min(sql<Date>`("asset"."localDateTime" AT TIME ZONE 'UTC'::text)::date`).as('startDate'))
.select((eb) => eb.fn.max(sql<Date>`("asset"."localDateTime" AT TIME ZONE 'UTC'::text)::date`).as('endDate'))
// lastModifiedAssetTimestamp is only used in the mobile app; please remove if not needed
.select((eb) => eb.fn.max('asset.updatedAt').as('lastModifiedAssetTimestamp'))
.select((eb) => sql<number>`${eb.fn.count('asset.id')}::int`.as('assetCount'))
.where('album_asset.albumsId', 'in', ids)
.where('album_asset.albumId', 'in', ids)
.where('asset.deletedAt', 'is', null)
.groupBy('album_asset.albumsId')
.groupBy('album_asset.albumId')
.execute()
);
}
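For reference, the metadata aggregation above compiles to roughly the following SQL (a sketch, not verbatim Kysely output; the visibility predicate added by withDefaultVisibility is omitted):

import { sql } from 'kysely';

// db: Kysely<DB>; ids: string[] are assumed in scope
await sql`
  SELECT "album_asset"."albumId",
         MIN(("asset"."localDateTime" AT TIME ZONE 'UTC')::date) AS "startDate",
         MAX(("asset"."localDateTime" AT TIME ZONE 'UTC')::date) AS "endDate",
         MAX("asset"."updatedAt") AS "lastModifiedAssetTimestamp",
         COUNT("asset"."id")::int AS "assetCount"
  FROM "asset"
  INNER JOIN "album_asset" ON "album_asset"."assetId" = "asset"."id"
  WHERE "album_asset"."albumId" = ANY(${ids}) AND "asset"."deletedAt" IS NULL
  GROUP BY "album_asset"."albumId"
`.execute(db);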
@@ -166,8 +166,8 @@ export class AlbumRepository {
eb.exists(
eb
.selectFrom('album_user')
.whereRef('album_user.albumsId', '=', 'album.id')
.where((eb) => eb.or([eb('album.ownerId', '=', ownerId), eb('album_user.usersId', '=', ownerId)])),
.whereRef('album_user.albumId', '=', 'album.id')
.where((eb) => eb.or([eb('album.ownerId', '=', ownerId), eb('album_user.userId', '=', ownerId)])),
),
eb.exists(
eb
@@ -195,7 +195,7 @@ export class AlbumRepository {
.selectAll('album')
.where('album.ownerId', '=', ownerId)
.where('album.deletedAt', 'is', null)
.where((eb) => eb.not(eb.exists(eb.selectFrom('album_user').whereRef('album_user.albumsId', '=', 'album.id'))))
.where((eb) => eb.not(eb.exists(eb.selectFrom('album_user').whereRef('album_user.albumId', '=', 'album.id'))))
.where((eb) => eb.not(eb.exists(eb.selectFrom('shared_link').whereRef('shared_link.albumId', '=', 'album.id'))))
.select(withOwner)
.orderBy('album.createdAt', 'desc')
@@ -217,7 +217,7 @@ export class AlbumRepository {
@GenerateSql({ params: [[DummyValue.UUID]] })
@Chunked()
async removeAssetsFromAll(assetIds: string[]): Promise<void> {
await this.db.deleteFrom('album_asset').where('album_asset.assetsId', 'in', assetIds).execute();
await this.db.deleteFrom('album_asset').where('album_asset.assetId', 'in', assetIds).execute();
}
@Chunked({ paramIndex: 1 })
@@ -228,8 +228,8 @@ export class AlbumRepository {
await this.db
.deleteFrom('album_asset')
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.assetsId', 'in', assetIds)
.where('album_asset.albumId', '=', albumId)
.where('album_asset.assetId', 'in', assetIds)
.execute();
}
@@ -250,10 +250,10 @@ export class AlbumRepository {
return this.db
.selectFrom('album_asset')
.selectAll()
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.assetsId', 'in', assetIds)
.where('album_asset.albumId', '=', albumId)
.where('album_asset.assetId', 'in', assetIds)
.execute()
.then((results) => new Set(results.map(({ assetsId }) => assetsId)));
.then((results) => new Set(results.map(({ assetId }) => assetId)));
}
async addAssetIds(albumId: string, assetIds: string[]): Promise<void> {
@@ -276,7 +276,7 @@ export class AlbumRepository {
await tx
.insertInto('album_user')
.values(
albumUsers.map((albumUser) => ({ albumsId: newAlbum.id, usersId: albumUser.userId, role: albumUser.role })),
albumUsers.map((albumUser) => ({ albumId: newAlbum.id, userId: albumUser.userId, role: albumUser.role })),
)
.execute();
}
@@ -317,12 +317,12 @@ export class AlbumRepository {
await db
.insertInto('album_asset')
.values(assetIds.map((assetId) => ({ albumsId: albumId, assetsId: assetId })))
.values(assetIds.map((assetId) => ({ albumId, assetId })))
.execute();
}
@Chunked({ chunkSize: 30_000 })
async addAssetIdsToAlbums(values: { albumsId: string; assetsId: string }[]): Promise<void> {
async addAssetIdsToAlbums(values: { albumId: string; assetId: string }[]): Promise<void> {
if (values.length === 0) {
return;
}
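@Chunked is an internal decorator that is not shown in this diff; assuming it slices the array argument into fixed-size batches before each call, the decorated insert reduces to roughly:

// assumed behavior of @Chunked({ chunkSize: 30_000 }); a sketch only
for (let i = 0; i < values.length; i += 30_000) {
  const chunk = values.slice(i, i + 30_000);
  await this.db.insertInto('album_asset').values(chunk).execute();
}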
@@ -344,7 +344,7 @@ export class AlbumRepository {
.updateTable('album')
.set((eb) => ({
albumThumbnailAssetId: this.updateThumbnailBuilder(eb)
.select('album_asset.assetsId')
.select('album_asset.assetId')
.orderBy('asset.fileCreatedAt', 'desc')
.limit(1),
}))
@@ -360,7 +360,7 @@ export class AlbumRepository {
eb.exists(
this.updateThumbnailBuilder(eb)
.select(sql`1`.as('1'))
.whereRef('album.albumThumbnailAssetId', '=', 'album_asset.assetsId'), // Has invalid assets
.whereRef('album.albumThumbnailAssetId', '=', 'album_asset.assetId'), // Has invalid assets
),
),
]),
@@ -375,9 +375,9 @@ export class AlbumRepository {
return eb
.selectFrom('album_asset')
.innerJoin('asset', (join) =>
join.onRef('album_asset.assetsId', '=', 'asset.id').on('asset.deletedAt', 'is', null),
join.onRef('album_asset.assetId', '=', 'asset.id').on('asset.deletedAt', 'is', null),
)
.whereRef('album_asset.albumsId', '=', 'album.id');
.whereRef('album_asset.albumId', '=', 'album.id');
}
/**
@@ -388,9 +388,9 @@ export class AlbumRepository {
getContributorCounts(id: string) {
return this.db
.selectFrom('album_asset')
.innerJoin('asset', 'asset.id', 'assetsId')
.innerJoin('asset', 'asset.id', 'assetId')
.where('asset.deletedAt', 'is', sql.lit(null))
.where('album_asset.albumsId', '=', id)
.where('album_asset.albumId', '=', id)
.select('asset.ownerId as userId')
.select((eb) => eb.fn.countAll<number>().as('assetCount'))
.groupBy('asset.ownerId')
@@ -405,8 +405,8 @@ export class AlbumRepository {
.expression((eb) =>
eb
.selectFrom('album_asset')
.select((eb) => ['album_asset.albumsId', eb.val(targetAssetId).as('assetsId')])
.where('album_asset.assetsId', '=', sourceAssetId),
.select((eb) => ['album_asset.albumId', eb.val(targetAssetId).as('assetId')])
.where('album_asset.assetId', '=', sourceAssetId),
)
.onConflict((oc) => oc.doNothing())
.execute();
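The merge helper above copies album membership from one asset to another in a single statement; with the renamed columns it compiles to roughly this SQL (sketch, not verbatim output):

import { sql } from 'kysely';

// db, sourceAssetId, and targetAssetId are assumed in scope
await sql`
  INSERT INTO "album_asset" ("albumId", "assetId")
  SELECT "album_asset"."albumId", ${targetAssetId}
  FROM "album_asset"
  WHERE "album_asset"."assetId" = ${sourceAssetId}
  ON CONFLICT DO NOTHING
`.execute(db);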

View File

@@ -46,8 +46,8 @@ export class AssetJobRepository {
eb
.selectFrom('tag')
.select(['tag.value'])
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('asset.id', '=', 'tag_asset.assetsId'),
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.whereRef('asset.id', '=', 'tag_asset.assetId'),
).as('tags'),
)
.limit(1)

View File

@@ -563,8 +563,8 @@ export class AssetRepository {
.$if(!!options.visibility, (qb) => qb.where('asset.visibility', '=', options.visibility!))
.$if(!!options.albumId, (qb) =>
qb
.innerJoin('album_asset', 'asset.id', 'album_asset.assetsId')
.where('album_asset.albumsId', '=', asUuid(options.albumId!)),
.innerJoin('album_asset', 'asset.id', 'album_asset.assetId')
.where('album_asset.albumId', '=', asUuid(options.albumId!)),
)
.$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!]))
.$if(!!options.withStacked, (qb) =>
@@ -641,8 +641,8 @@ export class AssetRepository {
eb.exists(
eb
.selectFrom('album_asset')
.whereRef('album_asset.assetsId', '=', 'asset.id')
.where('album_asset.albumsId', '=', asUuid(options.albumId!)),
.whereRef('album_asset.assetId', '=', 'asset.id')
.where('album_asset.albumId', '=', asUuid(options.albumId!)),
),
),
)

View File

@@ -26,8 +26,8 @@ export class DownloadRepository {
downloadAlbumId(albumId: string) {
return builder(this.db)
.innerJoin('album_asset', 'asset.id', 'album_asset.assetsId')
.where('album_asset.albumsId', '=', albumId)
.innerJoin('album_asset', 'asset.id', 'album_asset.assetId')
.where('album_asset.albumId', '=', albumId)
.stream();
}

View File

@@ -126,8 +126,8 @@ export class MapRepository {
eb.exists((eb) =>
eb
.selectFrom('album_asset')
.whereRef('asset.id', '=', 'album_asset.assetsId')
.where('album_asset.albumsId', 'in', albumIds),
.whereRef('asset.id', '=', 'album_asset.assetId')
.where('album_asset.albumId', 'in', albumIds),
),
);
}

View File

@@ -18,7 +18,7 @@ export class MemoryRepository implements IBulkAsset {
await this.db
.deleteFrom('memory_asset')
.using('asset')
.whereRef('memory_asset.assetsId', '=', 'asset.id')
.whereRef('memory_asset.assetId', '=', 'asset.id')
.where('asset.visibility', '!=', AssetVisibility.Timeline)
.execute();
@@ -64,7 +64,7 @@ export class MemoryRepository implements IBulkAsset {
eb
.selectFrom('asset')
.selectAll('asset')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetsId')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetId')
.whereRef('memory_asset.memoriesId', '=', 'memory.id')
.orderBy('asset.fileCreatedAt', 'asc')
.where('asset.visibility', '=', sql.lit(AssetVisibility.Timeline))
@@ -86,7 +86,7 @@ export class MemoryRepository implements IBulkAsset {
const { id } = await tx.insertInto('memory').values(memory).returning('id').executeTakeFirstOrThrow();
if (assetIds.size > 0) {
const values = [...assetIds].map((assetId) => ({ memoriesId: id, assetsId: assetId }));
const values = [...assetIds].map((assetId) => ({ memoriesId: id, assetId }));
await tx.insertInto('memory_asset').values(values).execute();
}
@@ -116,12 +116,12 @@ export class MemoryRepository implements IBulkAsset {
const results = await this.db
.selectFrom('memory_asset')
.select(['assetsId'])
.select(['assetId'])
.where('memoriesId', '=', id)
.where('assetsId', 'in', assetIds)
.where('assetId', 'in', assetIds)
.execute();
return new Set(results.map(({ assetsId }) => assetsId));
return new Set(results.map(({ assetId }) => assetId));
}
@GenerateSql({ params: [DummyValue.UUID, [DummyValue.UUID]] })
@@ -132,7 +132,7 @@ export class MemoryRepository implements IBulkAsset {
await this.db
.insertInto('memory_asset')
.values(assetIds.map((assetId) => ({ memoriesId: id, assetsId: assetId })))
.values(assetIds.map((assetId) => ({ memoriesId: id, assetId })))
.execute();
}
@@ -143,7 +143,7 @@ export class MemoryRepository implements IBulkAsset {
return;
}
await this.db.deleteFrom('memory_asset').where('memoriesId', '=', id).where('assetsId', 'in', assetIds).execute();
await this.db.deleteFrom('memory_asset').where('memoriesId', '=', id).where('assetId', 'in', assetIds).execute();
}
private getByIdBuilder(id: string) {
@@ -155,7 +155,7 @@ export class MemoryRepository implements IBulkAsset {
eb
.selectFrom('asset')
.selectAll('asset')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetsId')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetId')
.whereRef('memory_asset.memoriesId', '=', 'memory.id')
.orderBy('asset.fileCreatedAt', 'asc')
.where('asset.visibility', '=', sql.lit(AssetVisibility.Timeline))

View File

@@ -6,15 +6,15 @@ import { DB } from 'src/schema';
export class SharedLinkAssetRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
async remove(sharedLinkId: string, assetsId: string[]) {
async remove(sharedLinkId: string, assetId: string[]) {
const deleted = await this.db
.deleteFrom('shared_link_asset')
.where('shared_link_asset.sharedLinksId', '=', sharedLinkId)
.where('shared_link_asset.assetsId', 'in', assetsId)
.returning('assetsId')
.where('shared_link_asset.sharedLinkId', '=', sharedLinkId)
.where('shared_link_asset.assetId', 'in', assetId)
.returning('assetId')
.execute();
return deleted.map((row) => row.assetsId);
return deleted.map((row) => row.assetId);
}
@GenerateSql({ params: [{ sourceAssetId: DummyValue.UUID, targetAssetId: DummyValue.UUID }] })
@@ -24,8 +24,8 @@ export class SharedLinkAssetRepository {
.expression((eb) =>
eb
.selectFrom('shared_link_asset')
.select((eb) => [eb.val(targetAssetId).as('assetsId'), 'shared_link_asset.sharedLinksId'])
.where('shared_link_asset.assetsId', '=', sourceAssetId),
.select((eb) => [eb.val(targetAssetId).as('assetId'), 'shared_link_asset.sharedLinkId'])
.where('shared_link_asset.assetId', '=', sourceAssetId),
)
.onConflict((oc) => oc.doNothing())
.execute();

View File

@@ -28,8 +28,8 @@ export class SharedLinkRepository {
(eb) =>
eb
.selectFrom('shared_link_asset')
.whereRef('shared_link.id', '=', 'shared_link_asset.sharedLinksId')
.innerJoin('asset', 'asset.id', 'shared_link_asset.assetsId')
.whereRef('shared_link.id', '=', 'shared_link_asset.sharedLinkId')
.innerJoin('asset', 'asset.id', 'shared_link_asset.assetId')
.where('asset.deletedAt', 'is', null)
.selectAll('asset')
.innerJoinLateral(
@@ -53,13 +53,13 @@ export class SharedLinkRepository {
.selectAll('album')
.whereRef('album.id', '=', 'shared_link.albumId')
.where('album.deletedAt', 'is', null)
.leftJoin('album_asset', 'album_asset.albumsId', 'album.id')
.leftJoin('album_asset', 'album_asset.albumId', 'album.id')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('asset')
.selectAll('asset')
.whereRef('album_asset.assetsId', '=', 'asset.id')
.whereRef('album_asset.assetId', '=', 'asset.id')
.where('asset.deletedAt', 'is', null)
.innerJoinLateral(
(eb) =>
@@ -123,13 +123,13 @@ export class SharedLinkRepository {
.selectFrom('shared_link')
.selectAll('shared_link')
.where('shared_link.userId', '=', userId)
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinksId', 'shared_link.id')
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinkId', 'shared_link.id')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('asset')
.select((eb) => eb.fn.jsonAgg('asset').as('assets'))
.whereRef('asset.id', '=', 'shared_link_asset.assetsId')
.whereRef('asset.id', '=', 'shared_link_asset.assetId')
.where('asset.deletedAt', 'is', null)
.as('assets'),
(join) => join.onTrue(),
@@ -215,7 +215,7 @@ export class SharedLinkRepository {
if (entity.assetIds && entity.assetIds.length > 0) {
await this.db
.insertInto('shared_link_asset')
.values(entity.assetIds!.map((assetsId) => ({ assetsId, sharedLinksId: id })))
.values(entity.assetIds!.map((assetId) => ({ assetId, sharedLinkId: id })))
.execute();
}
@@ -233,7 +233,7 @@ export class SharedLinkRepository {
if (entity.assetIds && entity.assetIds.length > 0) {
await this.db
.insertInto('shared_link_asset')
.values(entity.assetIds!.map((assetsId) => ({ assetsId, sharedLinksId: id })))
.values(entity.assetIds!.map((assetId) => ({ assetId, sharedLinkId: id })))
.execute();
}
@@ -249,12 +249,12 @@ export class SharedLinkRepository {
.selectFrom('shared_link')
.selectAll('shared_link')
.where('shared_link.id', '=', id)
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinksId', 'shared_link.id')
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinkId', 'shared_link.id')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('asset')
.whereRef('asset.id', '=', 'shared_link_asset.assetsId')
.whereRef('asset.id', '=', 'shared_link_asset.assetId')
.selectAll('asset')
.innerJoinLateral(
(eb) =>

View File

@@ -33,8 +33,8 @@ const withAssets = (eb: ExpressionBuilder<DB, 'stack'>, withTags = false) => {
eb
.selectFrom('tag')
.select(columns.tag)
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('tag_asset.assetsId', '=', 'asset.id'),
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.whereRef('tag_asset.assetId', '=', 'asset.id'),
).as('tags'),
),
)

View File

@@ -143,8 +143,8 @@ class AlbumSync extends BaseSync {
getCreatedAfter({ nowId, userId, afterCreateId }: SyncCreatedAfterOptions) {
return this.db
.selectFrom('album_user')
.select(['albumsId as id', 'createId'])
.where('usersId', '=', userId)
.select(['albumId as id', 'createId'])
.where('userId', '=', userId)
.$if(!!afterCreateId, (qb) => qb.where('createId', '>=', afterCreateId!))
.where('createId', '<', nowId)
.orderBy('createId', 'asc')
@@ -168,8 +168,8 @@ class AlbumSync extends BaseSync {
const userId = options.userId;
return this.upsertQuery('album', options)
.distinctOn(['album.id', 'album.updateId'])
.leftJoin('album_user as album_users', 'album.id', 'album_users.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_users.usersId', '=', userId)]))
.leftJoin('album_user as album_users', 'album.id', 'album_users.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_users.userId', '=', userId)]))
.select([
'album.id',
'album.ownerId',
@@ -190,10 +190,10 @@ class AlbumAssetSync extends BaseSync {
@GenerateSql({ params: [dummyBackfillOptions, DummyValue.UUID], stream: true })
getBackfill(options: SyncBackfillOptions, albumId: string) {
return this.backfillQuery('album_asset', options)
.innerJoin('asset', 'asset.id', 'album_asset.assetsId')
.innerJoin('asset', 'asset.id', 'album_asset.assetId')
.select(columns.syncAsset)
.select('album_asset.updateId')
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.albumId', '=', albumId)
.stream();
}
@@ -201,13 +201,13 @@ class AlbumAssetSync extends BaseSync {
getUpdates(options: SyncQueryOptions, albumToAssetAck: SyncAck) {
const userId = options.userId;
return this.upsertQuery('asset', options)
.innerJoin('album_asset', 'album_asset.assetsId', 'asset.id')
.innerJoin('album_asset', 'album_asset.assetId', 'asset.id')
.select(columns.syncAsset)
.select('asset.updateId')
.where('album_asset.updateId', '<=', albumToAssetAck.updateId) // Ensure we only send updates for assets that the client already knows about
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
@@ -216,11 +216,11 @@ class AlbumAssetSync extends BaseSync {
const userId = options.userId;
return this.upsertQuery('album_asset', options)
.select('album_asset.updateId')
.innerJoin('asset', 'asset.id', 'album_asset.assetsId')
.innerJoin('asset', 'asset.id', 'album_asset.assetId')
.select(columns.syncAsset)
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
}
@@ -229,10 +229,10 @@ class AlbumAssetExifSync extends BaseSync {
@GenerateSql({ params: [dummyBackfillOptions, DummyValue.UUID], stream: true })
getBackfill(options: SyncBackfillOptions, albumId: string) {
return this.backfillQuery('album_asset', options)
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetsId')
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetId')
.select(columns.syncAssetExif)
.select('album_asset.updateId')
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.albumId', '=', albumId)
.stream();
}
@@ -240,13 +240,13 @@ class AlbumAssetExifSync extends BaseSync {
getUpdates(options: SyncQueryOptions, albumToAssetAck: SyncAck) {
const userId = options.userId;
return this.upsertQuery('asset_exif', options)
.innerJoin('album_asset', 'album_asset.assetsId', 'asset_exif.assetId')
.innerJoin('album_asset', 'album_asset.assetId', 'asset_exif.assetId')
.select(columns.syncAssetExif)
.select('asset_exif.updateId')
.where('album_asset.updateId', '<=', albumToAssetAck.updateId) // Ensure we only send exif updates for assets that the client already knows about
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
@@ -255,11 +255,11 @@ class AlbumAssetExifSync extends BaseSync {
const userId = options.userId;
return this.upsertQuery('album_asset', options)
.select('album_asset.updateId')
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetsId')
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetId')
.select(columns.syncAssetExif)
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
}
@@ -268,8 +268,8 @@ class AlbumToAssetSync extends BaseSync {
@GenerateSql({ params: [dummyBackfillOptions, DummyValue.UUID], stream: true })
getBackfill(options: SyncBackfillOptions, albumId: string) {
return this.backfillQuery('album_asset', options)
.select(['album_asset.assetsId as assetId', 'album_asset.albumsId as albumId', 'album_asset.updateId'])
.where('album_asset.albumsId', '=', albumId)
.select(['album_asset.assetId as assetId', 'album_asset.albumId as albumId', 'album_asset.updateId'])
.where('album_asset.albumId', '=', albumId)
.stream();
}
@@ -290,8 +290,8 @@ class AlbumToAssetSync extends BaseSync {
eb.parens(
eb
.selectFrom('album_user')
.select(['album_user.albumsId as id'])
.where('album_user.usersId', '=', userId),
.select(['album_user.albumId as id'])
.where('album_user.userId', '=', userId),
),
),
),
@@ -307,10 +307,10 @@ class AlbumToAssetSync extends BaseSync {
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
return this.upsertQuery('album_asset', options)
.select(['album_asset.assetsId as assetId', 'album_asset.albumsId as albumId', 'album_asset.updateId'])
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.select(['album_asset.assetId as assetId', 'album_asset.albumId as albumId', 'album_asset.updateId'])
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
}
@@ -321,7 +321,7 @@ class AlbumUserSync extends BaseSync {
return this.backfillQuery('album_user', options)
.select(columns.syncAlbumUser)
.select('album_user.updateId')
.where('albumsId', '=', albumId)
.where('albumId', '=', albumId)
.stream();
}
@@ -342,8 +342,8 @@ class AlbumUserSync extends BaseSync {
eb.parens(
eb
.selectFrom('album_user')
.select(['album_user.albumsId as id'])
.where('album_user.usersId', '=', userId),
.select(['album_user.albumId as id'])
.where('album_user.userId', '=', userId),
),
),
),
@@ -363,7 +363,7 @@ class AlbumUserSync extends BaseSync {
.select('album_user.updateId')
.where((eb) =>
eb(
'album_user.albumsId',
'album_user.albumId',
'in',
eb
.selectFrom('album')
@@ -373,8 +373,8 @@ class AlbumUserSync extends BaseSync {
eb.parens(
eb
.selectFrom('album_user as albumUsers')
.select(['albumUsers.albumsId as id'])
.where('albumUsers.usersId', '=', userId),
.select(['albumUsers.albumId as id'])
.where('albumUsers.userId', '=', userId),
),
),
),
@@ -550,7 +550,7 @@ class MemoryToAssetSync extends BaseSync {
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('memory_asset', options)
.select(['memoriesId as memoryId', 'assetsId as assetId'])
.select(['memoriesId as memoryId', 'assetId as assetId'])
.select('updateId')
.where('memoriesId', 'in', (eb) => eb.selectFrom('memory').select('id').where('ownerId', '=', options.userId))
.stream();
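All of these sync streams share one windowing idea: rows carry monotonically increasing v7 ids (createId/updateId), the internal upsertQuery/backfillQuery helpers bound them to a window the client has not yet seen, and the extra updateId <= ack predicates hold back rows for entities the client has not acknowledged. A standalone sketch of the shape (the helper internals are an assumption, not shown in this diff):

// ack.updateId: last id the client confirmed; nowId: snapshot upper bound
db.selectFrom('album_asset')
  .select(['albumId', 'assetId', 'updateId'])
  .where('updateId', '>', ack.updateId) // only rows new to this client
  .where('updateId', '<', nowId) // exclude rows still being written
  .orderBy('updateId', 'asc')
  .stream();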

View File

@@ -97,9 +97,9 @@ export class TagRepository {
const results = await this.db
.selectFrom('tag_asset')
.select(['assetsId as assetId'])
.where('tagsId', '=', tagId)
.where('assetsId', 'in', assetIds)
.select(['assetId as assetId'])
.where('tagId', '=', tagId)
.where('assetId', 'in', assetIds)
.execute();
return new Set(results.map(({ assetId }) => assetId));
@@ -114,7 +114,7 @@ export class TagRepository {
await this.db
.insertInto('tag_asset')
.values(assetIds.map((assetId) => ({ tagsId: tagId, assetsId: assetId })))
.values(assetIds.map((assetId) => ({ tagId, assetId })))
.execute();
}
@@ -125,10 +125,10 @@ export class TagRepository {
return;
}
await this.db.deleteFrom('tag_asset').where('tagsId', '=', tagId).where('assetsId', 'in', assetIds).execute();
await this.db.deleteFrom('tag_asset').where('tagId', '=', tagId).where('assetId', 'in', assetIds).execute();
}
@GenerateSql({ params: [[{ assetId: DummyValue.UUID, tagsIds: [DummyValue.UUID] }]] })
@GenerateSql({ params: [[{ assetId: DummyValue.UUID, tagId: DummyValue.UUID }]] })
@Chunked()
upsertAssetIds(items: Insertable<TagAssetTable>[]) {
if (items.length === 0) {
@@ -147,7 +147,7 @@ export class TagRepository {
@Chunked({ paramIndex: 1 })
replaceAssetTags(assetId: string, tagIds: string[]) {
return this.db.transaction().execute(async (tx) => {
await tx.deleteFrom('tag_asset').where('assetsId', '=', assetId).execute();
await tx.deleteFrom('tag_asset').where('assetId', '=', assetId).execute();
if (tagIds.length === 0) {
return;
@@ -155,7 +155,7 @@ export class TagRepository {
return tx
.insertInto('tag_asset')
.values(tagIds.map((tagId) => ({ tagsId: tagId, assetsId: assetId })))
.values(tagIds.map((tagId) => ({ tagId, assetId })))
.onConflict((oc) => oc.doNothing())
.returningAll()
.execute();
@@ -170,7 +170,7 @@ export class TagRepository {
exists(
selectFrom('tag_closure')
.whereRef('tag.id', '=', 'tag_closure.id_ancestor')
.innerJoin('tag_asset', 'tag_closure.id_descendant', 'tag_asset.tagsId'),
.innerJoin('tag_asset', 'tag_closure.id_descendant', 'tag_asset.tagId'),
),
),
)

View File

@@ -29,7 +29,7 @@ export const album_user_after_insert = registerFunction({
body: `
BEGIN
UPDATE album SET "updatedAt" = clock_timestamp(), "updateId" = immich_uuid_v7(clock_timestamp())
WHERE "id" IN (SELECT DISTINCT "albumsId" FROM inserted_rows);
WHERE "id" IN (SELECT DISTINCT "albumId" FROM inserted_rows);
RETURN NULL;
END`,
});
@@ -139,8 +139,8 @@ export const album_asset_delete_audit = registerFunction({
body: `
BEGIN
INSERT INTO album_asset_audit ("albumId", "assetId")
SELECT "albumsId", "assetsId" FROM OLD
WHERE "albumsId" IN (SELECT "id" FROM album WHERE "id" IN (SELECT "albumsId" FROM OLD));
SELECT "albumId", "assetId" FROM OLD
WHERE "albumId" IN (SELECT "id" FROM album WHERE "id" IN (SELECT "albumId" FROM OLD));
RETURN NULL;
END`,
});
@@ -152,12 +152,12 @@ export const album_user_delete_audit = registerFunction({
body: `
BEGIN
INSERT INTO album_audit ("albumId", "userId")
SELECT "albumsId", "usersId"
SELECT "albumId", "userId"
FROM OLD;
IF pg_trigger_depth() = 1 THEN
INSERT INTO album_user_audit ("albumId", "userId")
SELECT "albumsId", "usersId"
SELECT "albumId", "userId"
FROM OLD;
END IF;
@@ -185,7 +185,7 @@ export const memory_asset_delete_audit = registerFunction({
body: `
BEGIN
INSERT INTO memory_asset_audit ("memoryId", "assetId")
SELECT "memoriesId", "assetsId" FROM OLD
SELECT "memoriesId", "assetId" FROM OLD
WHERE "memoriesId" IN (SELECT "id" FROM memory WHERE "id" IN (SELECT "memoriesId" FROM OLD));
RETURN NULL;
END`,
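These are statement-level audit triggers: OLD here is a transition table holding every row deleted by the statement, which is why the bodies can SELECT ... FROM OLD. The DDL that attaches such a function is generated by sql-tools and is not part of this diff, but it looks roughly like this (sketch; the unquoted OLD in the bodies folds to the lowercase alias):

import { sql } from 'kysely';

// illustrative trigger attachment, not code from this PR
await sql`
  CREATE TRIGGER album_asset_delete_audit
  AFTER DELETE ON album_asset
  REFERENCING OLD TABLE AS old
  FOR EACH STATEMENT
  EXECUTE FUNCTION album_asset_delete_audit()
`.execute(db);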

View File

@@ -0,0 +1,99 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
// rename columns
await sql`ALTER TABLE "album_asset" RENAME COLUMN "albumsId" TO "albumId";`.execute(db);
await sql`ALTER TABLE "album_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME COLUMN "albumsId" TO "albumId";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME COLUMN "usersId" TO "userId";`.execute(db);
await sql`ALTER TABLE "memory_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME COLUMN "sharedLinksId" TO "sharedLinkId";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME COLUMN "tagsId" TO "tagId";`.execute(db);
// rename constraints
await sql`ALTER TABLE "album_asset" RENAME CONSTRAINT "album_asset_albumsId_fkey" TO "album_asset_albumId_fkey";`.execute(db);
await sql`ALTER TABLE "album_asset" RENAME CONSTRAINT "album_asset_assetsId_fkey" TO "album_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME CONSTRAINT "album_user_albumsId_fkey" TO "album_user_albumId_fkey";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME CONSTRAINT "album_user_usersId_fkey" TO "album_user_userId_fkey";`.execute(db);
await sql`ALTER TABLE "memory_asset" RENAME CONSTRAINT "memory_asset_assetsId_fkey" TO "memory_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME CONSTRAINT "shared_link_asset_assetsId_fkey" TO "shared_link_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME CONSTRAINT "shared_link_asset_sharedLinksId_fkey" TO "shared_link_asset_sharedLinkId_fkey";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME CONSTRAINT "tag_asset_assetsId_fkey" TO "tag_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME CONSTRAINT "tag_asset_tagsId_fkey" TO "tag_asset_tagId_fkey";`.execute(db);
// rename indexes
await sql`ALTER INDEX "album_asset_albumsId_idx" RENAME TO "album_asset_albumId_idx";`.execute(db);
await sql`ALTER INDEX "album_asset_assetsId_idx" RENAME TO "album_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "album_user_usersId_idx" RENAME TO "album_user_userId_idx";`.execute(db);
await sql`ALTER INDEX "album_user_albumsId_idx" RENAME TO "album_user_albumId_idx";`.execute(db);
await sql`ALTER INDEX "memory_asset_assetsId_idx" RENAME TO "memory_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "shared_link_asset_sharedLinksId_idx" RENAME TO "shared_link_asset_sharedLinkId_idx";`.execute(db);
await sql`ALTER INDEX "shared_link_asset_assetsId_idx" RENAME TO "shared_link_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "tag_asset_assetsId_idx" RENAME TO "tag_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "tag_asset_tagsId_idx" RENAME TO "tag_asset_tagId_idx";`.execute(db);
await sql`ALTER INDEX "tag_asset_assetsId_tagsId_idx" RENAME TO "tag_asset_assetId_tagId_idx";`.execute(db);
// update triggers and functions
await sql`CREATE OR REPLACE FUNCTION album_user_after_insert()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
UPDATE album SET "updatedAt" = clock_timestamp(), "updateId" = immich_uuid_v7(clock_timestamp())
WHERE "id" IN (SELECT DISTINCT "albumId" FROM inserted_rows);
RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION album_asset_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO album_asset_audit ("albumId", "assetId")
SELECT "albumId", "assetId" FROM OLD
WHERE "albumId" IN (SELECT "id" FROM album WHERE "id" IN (SELECT "albumId" FROM OLD));
RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION album_user_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO album_audit ("albumId", "userId")
SELECT "albumId", "userId"
FROM OLD;
IF pg_trigger_depth() = 1 THEN
INSERT INTO album_user_audit ("albumId", "userId")
SELECT "albumId", "userId"
FROM OLD;
END IF;
RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION memory_asset_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO memory_asset_audit ("memoryId", "assetId")
SELECT "memoriesId", "assetId" FROM OLD
WHERE "memoriesId" IN (SELECT "id" FROM memory WHERE "id" IN (SELECT "memoriesId" FROM OLD));
RETURN NULL;
END
$$;`.execute(db);
// update overrides
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"album_user_after_insert","sql":"CREATE OR REPLACE FUNCTION album_user_after_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE album SET \\"updatedAt\\" = clock_timestamp(), \\"updateId\\" = immich_uuid_v7(clock_timestamp())\\n WHERE \\"id\\" IN (SELECT DISTINCT \\"albumId\\" FROM inserted_rows);\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_album_user_after_insert';`.execute(db);
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"album_asset_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_asset_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO album_asset_audit (\\"albumId\\", \\"assetId\\")\\n SELECT \\"albumId\\", \\"assetId\\" FROM OLD\\n WHERE \\"albumId\\" IN (SELECT \\"id\\" FROM album WHERE \\"id\\" IN (SELECT \\"albumId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_album_asset_delete_audit';`.execute(db);
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"album_user_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_user_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO album_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumId\\", \\"userId\\"\\n FROM OLD;\\n\\n IF pg_trigger_depth() = 1 THEN\\n INSERT INTO album_user_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumId\\", \\"userId\\"\\n FROM OLD;\\n END IF;\\n\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_album_user_delete_audit';`.execute(db);
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"memory_asset_delete_audit","sql":"CREATE OR REPLACE FUNCTION memory_asset_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO memory_asset_audit (\\"memoryId\\", \\"assetId\\")\\n SELECT \\"memoriesId\\", \\"assetId\\" FROM OLD\\n WHERE \\"memoriesId\\" IN (SELECT \\"id\\" FROM memory WHERE \\"id\\" IN (SELECT \\"memoriesId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_memory_asset_delete_audit';`.execute(db);
}
export function down() {
// not implemented
}
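Note why the functions are re-created at all: ALTER TABLE ... RENAME COLUMN rewrites catalog metadata, but PL/pgSQL bodies are stored as plain text and parsed at call time, so any trigger function still naming "albumsId" would break on its next invocation. A quick sanity check after running the migration (illustrative only, not part of the PR):

import { sql } from 'kysely';

const { rows } = await sql<{ column_name: string }>`
  SELECT column_name FROM information_schema.columns
  WHERE table_name = 'album_asset'
  ORDER BY column_name
`.execute(db);
// expect "albumId" and "assetId"; "albumsId" and "assetsId" should be gone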

View File

@@ -32,7 +32,7 @@ import {
@ForeignKeyConstraint({
columns: ['albumId', 'assetId'],
referenceTable: () => AlbumAssetTable,
referenceColumns: ['albumsId', 'assetsId'],
referenceColumns: ['albumId', 'assetId'],
onUpdate: 'NO ACTION',
onDelete: 'CASCADE',
})

View File

@@ -22,10 +22,10 @@ import {
})
export class AlbumAssetTable {
@ForeignKeyColumn(() => AlbumTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true })
albumsId!: string;
albumId!: string;
@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true })
assetsId!: string;
assetId!: string;
@CreateDateColumn()
createdAt!: Generated<Timestamp>;

View File

@@ -37,7 +37,7 @@ export class AlbumUserTable {
nullable: false,
primary: true,
})
albumsId!: string;
albumId!: string;
@ForeignKeyColumn(() => UserTable, {
onDelete: 'CASCADE',
@@ -45,7 +45,7 @@ export class AlbumUserTable {
nullable: false,
primary: true,
})
usersId!: string;
userId!: string;
@Column({ type: 'character varying', default: AlbumUserRole.Editor })
role!: Generated<AlbumUserRole>;

View File

@@ -25,7 +25,7 @@ export class MemoryAssetTable {
memoriesId!: string;
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
assetsId!: string;
assetId!: string;
@CreateDateColumn()
createdAt!: Generated<Timestamp>;

View File

@@ -5,8 +5,8 @@ import { ForeignKeyColumn, Table } from 'src/sql-tools';
@Table('shared_link_asset')
export class SharedLinkAssetTable {
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
assetsId!: string;
assetId!: string;
@ForeignKeyColumn(() => SharedLinkTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
sharedLinksId!: string;
sharedLinkId!: string;
}

View File

@@ -2,12 +2,12 @@ import { AssetTable } from 'src/schema/tables/asset.table';
import { TagTable } from 'src/schema/tables/tag.table';
import { ForeignKeyColumn, Index, Table } from 'src/sql-tools';
@Index({ columns: ['assetsId', 'tagsId'] })
@Index({ columns: ['assetId', 'tagId'] })
@Table('tag_asset')
export class TagAssetTable {
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true })
assetsId!: string;
assetId!: string;
@ForeignKeyColumn(() => TagTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true })
tagsId!: string;
tagId!: string;
}
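Assuming the sql-tools decorators map one column per @ForeignKeyColumn and build a composite primary key from the primary: true flags, the renamed join table corresponds to roughly this DDL (a sketch; the real schema is generated):

import { sql } from 'kysely';

await sql`
  CREATE TABLE "tag_asset" (
    "assetId" uuid NOT NULL REFERENCES "asset" ("id") ON UPDATE CASCADE ON DELETE CASCADE,
    "tagId" uuid NOT NULL REFERENCES "tag" ("id") ON UPDATE CASCADE ON DELETE CASCADE,
    PRIMARY KEY ("assetId", "tagId")
  )
`.execute(db);
await sql`CREATE INDEX "tag_asset_assetId_tagId_idx" ON "tag_asset" ("assetId", "tagId")`.execute(db);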

View File

@@ -402,16 +402,16 @@ describe(AlbumService.name, () => {
mocks.album.update.mockResolvedValue(albumStub.sharedWithAdmin);
mocks.user.get.mockResolvedValue(userStub.user2);
mocks.albumUser.create.mockResolvedValue({
usersId: userStub.user2.id,
albumsId: albumStub.sharedWithAdmin.id,
userId: userStub.user2.id,
albumId: albumStub.sharedWithAdmin.id,
role: AlbumUserRole.Editor,
});
await sut.addUsers(authStub.user1, albumStub.sharedWithAdmin.id, {
albumUsers: [{ userId: authStub.user2.user.id }],
});
expect(mocks.albumUser.create).toHaveBeenCalledWith({
usersId: authStub.user2.user.id,
albumsId: albumStub.sharedWithAdmin.id,
userId: authStub.user2.user.id,
albumId: albumStub.sharedWithAdmin.id,
});
expect(mocks.event.emit).toHaveBeenCalledWith('AlbumInvite', {
id: albumStub.sharedWithAdmin.id,
@@ -439,8 +439,8 @@ describe(AlbumService.name, () => {
expect(mocks.albumUser.delete).toHaveBeenCalledTimes(1);
expect(mocks.albumUser.delete).toHaveBeenCalledWith({
albumsId: albumStub.sharedWithUser.id,
usersId: userStub.user1.id,
albumId: albumStub.sharedWithUser.id,
userId: userStub.user1.id,
});
expect(mocks.album.getById).toHaveBeenCalledWith(albumStub.sharedWithUser.id, { withAssets: false });
});
@@ -467,8 +467,8 @@ describe(AlbumService.name, () => {
expect(mocks.albumUser.delete).toHaveBeenCalledTimes(1);
expect(mocks.albumUser.delete).toHaveBeenCalledWith({
albumsId: albumStub.sharedWithUser.id,
usersId: authStub.user1.user.id,
albumId: albumStub.sharedWithUser.id,
userId: authStub.user1.user.id,
});
});
@@ -480,8 +480,8 @@ describe(AlbumService.name, () => {
expect(mocks.albumUser.delete).toHaveBeenCalledTimes(1);
expect(mocks.albumUser.delete).toHaveBeenCalledWith({
albumsId: albumStub.sharedWithUser.id,
usersId: authStub.user1.user.id,
albumId: albumStub.sharedWithUser.id,
userId: authStub.user1.user.id,
});
});
@@ -515,7 +515,7 @@ describe(AlbumService.name, () => {
role: AlbumUserRole.Editor,
});
expect(mocks.albumUser.update).toHaveBeenCalledWith(
{ albumsId: albumStub.sharedWithAdmin.id, usersId: userStub.admin.id },
{ albumId: albumStub.sharedWithAdmin.id, userId: userStub.admin.id },
{ role: AlbumUserRole.Editor },
);
});
@@ -804,12 +804,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
});
@@ -840,12 +840,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-id',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
});
@@ -876,12 +876,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
expect(mocks.event.emit).toHaveBeenCalledWith('AlbumUpdate', {
id: 'album-123',
@@ -936,9 +936,9 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
]);
expect(mocks.event.emit).toHaveBeenCalledWith('AlbumUpdate', {
id: 'album-123',
@@ -977,12 +977,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
expect(mocks.access.asset.checkPartnerAccess).toHaveBeenCalledWith(
authStub.admin.user.id,
@@ -1014,9 +1014,9 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
});

View File

@@ -215,7 +215,7 @@ export class AlbumService extends BaseService {
return results;
}
const albumAssetValues: { albumsId: string; assetsId: string }[] = [];
const albumAssetValues: { albumId: string; assetId: string }[] = [];
const events: { id: string; recipients: string[] }[] = [];
for (const albumId of allowedAlbumIds) {
const existingAssetIds = await this.albumRepository.getAssetIds(albumId, [...allowedAssetIds]);
@@ -228,7 +228,7 @@ export class AlbumService extends BaseService {
results.success = true;
for (const assetId of notPresentAssetIds) {
albumAssetValues.push({ albumsId: albumId, assetsId: assetId });
albumAssetValues.push({ albumId, assetId });
}
await this.albumRepository.update(albumId, {
id: albumId,
@@ -289,7 +289,7 @@ export class AlbumService extends BaseService {
throw new BadRequestException('User not found');
}
await this.albumUserRepository.create({ usersId: userId, albumsId: id, role });
await this.albumUserRepository.create({ userId, albumId: id, role });
await this.eventRepository.emit('AlbumInvite', { id, userId });
}
@@ -317,12 +317,12 @@ export class AlbumService extends BaseService {
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] });
}
await this.albumUserRepository.delete({ albumsId: id, usersId: userId });
await this.albumUserRepository.delete({ albumId: id, userId });
}
async updateUser(auth: AuthDto, id: string, userId: string, dto: UpdateAlbumUserDto): Promise<void> {
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] });
await this.albumUserRepository.update({ albumsId: id, usersId: userId }, { role: dto.role });
await this.albumUserRepository.update({ albumId: id, userId }, { role: dto.role });
}
private async findOrFail(id: string, options: AlbumInfoOptions) {

View File

@@ -1,4 +1,5 @@
import { BinaryField, ExifDateTime } from 'exiftool-vendored';
import { DateTime } from 'luxon';
import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { defaults } from 'src/config';
@@ -231,7 +232,7 @@ describe(MetadataService.name, () => {
});
});
it('should account for the server being in a non-UTC timezone', async () => {
it('should determine dateTimeOriginal regardless of the server time zone', async () => {
process.env.TZ = 'America/Los_Angeles';
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });
@@ -239,7 +240,7 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
dateTimeOriginal: new Date('2022-01-01T08:00:00.000Z'),
dateTimeOriginal: new Date('2022-01-01T00:00:00.000Z'),
}),
);
@@ -856,6 +857,7 @@ describe(MetadataService.name, () => {
tz: 'UTC-11:30',
Rating: 3,
};
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags(tags);
@@ -897,7 +899,7 @@ describe(MetadataService.name, () => {
id: assetStub.image.id,
duration: null,
fileCreatedAt: dateForTest,
localDateTime: dateForTest,
localDateTime: DateTime.fromISO('1970-01-01T00:00:00.000Z').toJSDate(),
}),
);
});
@@ -1595,7 +1597,7 @@ describe(MetadataService.name, () => {
const result = firstDateTime(tags);
expect(result?.tag).toBe('SonyDateTime2');
expect(result?.dateTime?.toDate()?.toISOString()).toBe('2023-07-07T07:00:00.000Z');
expect(result?.dateTime?.toISOString()).toBe('2023-07-07T07:00:00');
});
it('should respect full priority order with all date tags present', () => {
@@ -1624,7 +1626,7 @@ describe(MetadataService.name, () => {
const result = firstDateTime(tags);
// Should use SubSecDateTimeOriginal as it has highest priority
expect(result?.tag).toBe('SubSecDateTimeOriginal');
expect(result?.dateTime?.toDate()?.toISOString()).toBe('2023-01-01T01:00:00.000Z');
expect(result?.dateTime?.toISOString()).toBe('2023-01-01T01:00:00');
});
it('should handle missing SubSec tags and use available date tags', () => {
@@ -1644,7 +1646,7 @@ describe(MetadataService.name, () => {
const result = firstDateTime(tags);
// Should use CreationDate when available
expect(result?.tag).toBe('CreationDate');
expect(result?.dateTime?.toDate()?.toISOString()).toBe('2023-07-07T07:00:00.000Z');
expect(result?.dateTime?.toISOString()).toBe('2023-07-07T07:00:00');
});
it('should handle invalid date formats gracefully', () => {
@@ -1658,7 +1660,7 @@ describe(MetadataService.name, () => {
const result = firstDateTime(tags);
// Should skip invalid dates and use the first valid one
expect(result?.tag).toBe('GPSDateTime');
expect(result?.dateTime?.toDate()?.toISOString()).toBe('2023-10-10T10:00:00.000Z');
expect(result?.dateTime?.toISOString()).toBe('2023-10-10T10:00:00');
});
it('should prefer CreationDate over CreateDate', () => {

View File

@@ -2,7 +2,7 @@ import { Injectable } from '@nestjs/common';
import { ContainerDirectoryItem, ExifDateTime, Tags } from 'exiftool-vendored';
import { Insertable } from 'kysely';
import _ from 'lodash';
import { Duration } from 'luxon';
import { DateTime, Duration } from 'luxon';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import { join, parse } from 'node:path';
@@ -236,8 +236,8 @@ export class MetadataService extends BaseService {
latitude: number | null = null,
longitude: number | null = null;
if (this.hasGeo(exifTags)) {
latitude = exifTags.GPSLatitude;
longitude = exifTags.GPSLongitude;
latitude = Number(exifTags.GPSLatitude);
longitude = Number(exifTags.GPSLongitude);
if (reverseGeocoding.enabled) {
geo = await this.mapRepository.reverseGeocode({ latitude, longitude });
}
@@ -866,40 +866,47 @@ export class MetadataService extends BaseService {
this.logger.debug(`No timezone information found for asset ${asset.id}: ${asset.originalPath}`);
}
let dateTimeOriginal = dateTime?.toDate();
let localDateTime = dateTime?.toDateTime().setZone('UTC', { keepLocalTime: true }).toJSDate();
let dateTimeOriginal = dateTime?.toDateTime();
// do not let JavaScript use local timezone
if (dateTimeOriginal && !dateTime?.hasZone) {
dateTimeOriginal = dateTimeOriginal.setZone('UTC', { keepLocalTime: true });
}
// align with whatever timeZone we chose
dateTimeOriginal = dateTimeOriginal?.setZone(timeZone ?? 'UTC');
// store as "local time"
let localDateTime = dateTimeOriginal?.setZone('UTC', { keepLocalTime: true });
if (!localDateTime || !dateTimeOriginal) {
// FileCreateDate is not available on linux, likely because exiftool hasn't integrated the statx syscall yet
// birthtime is not available in Docker on macOS, so it appears as 0
const earliestDate = new Date(
const earliestDate = DateTime.fromMillis(
Math.min(
asset.fileCreatedAt.getTime(),
stats.birthtimeMs ? Math.min(stats.mtimeMs, stats.birthtimeMs) : stats.mtime.getTime(),
),
);
this.logger.debug(
`No exif date time found, falling back on ${earliestDate.toISOString()}, earliest of file creation and modification for asset ${asset.id}: ${asset.originalPath}`,
`No exif date time found, falling back on ${earliestDate.toISO()}, earliest of file creation and modification for asset ${asset.id}: ${asset.originalPath}`,
);
dateTimeOriginal = localDateTime = earliestDate;
}
this.logger.verbose(
`Found local date time ${localDateTime.toISOString()} for asset ${asset.id}: ${asset.originalPath}`,
);
this.logger.verbose(`Found local date time ${localDateTime.toISO()} for asset ${asset.id}: ${asset.originalPath}`);
return {
dateTimeOriginal,
timeZone,
localDateTime,
localDateTime: localDateTime.toJSDate(),
dateTimeOriginal: dateTimeOriginal.toJSDate(),
};
}
private hasGeo(tags: ImmichTags): tags is ImmichTags & { GPSLatitude: number; GPSLongitude: number } {
return (
tags.GPSLatitude !== undefined &&
tags.GPSLongitude !== undefined &&
(tags.GPSLatitude !== 0 || tags.GPSLatitude !== 0)
);
private hasGeo(tags: ImmichTags) {
const lat = Number(tags.GPSLatitude);
const lng = Number(tags.GPSLongitude);
return !Number.isNaN(lat) && !Number.isNaN(lng) && (lat !== 0 || lng !== 0);
}
private getAutoStackId(tags: ImmichTags | null): string | null {
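The timezone rework above is easier to see with a toy Luxon example; it mirrors the test change from 08:00Z to 00:00Z earlier in this diff (sketch, not code from the PR):

import { DateTime } from 'luxon';

// parsed without zone info, Luxon assumes the server zone (as the test sets it):
process.env.TZ = 'America/Los_Angeles';
const naive = DateTime.fromFormat('2022:01:01 00:00:00', 'yyyy:MM:dd HH:mm:ss');
console.log(naive.toUTC().toISO()); // '2022-01-01T08:00:00.000Z' (shifted by the server offset)

// pinning to UTC while keeping the wall clock removes the server-zone leak:
console.log(naive.setZone('UTC', { keepLocalTime: true }).toISO()); // '2022-01-01T00:00:00.000Z'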

View File

@@ -192,12 +192,12 @@ describe(TagService.name, () => {
mocks.access.tag.checkOwnerAccess.mockResolvedValue(new Set(['tag-1', 'tag-2']));
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2', 'asset-3']));
mocks.tag.upsertAssetIds.mockResolvedValue([
{ tagsId: 'tag-1', assetsId: 'asset-1' },
{ tagsId: 'tag-1', assetsId: 'asset-2' },
{ tagsId: 'tag-1', assetsId: 'asset-3' },
{ tagsId: 'tag-2', assetsId: 'asset-1' },
{ tagsId: 'tag-2', assetsId: 'asset-2' },
{ tagsId: 'tag-2', assetsId: 'asset-3' },
{ tagId: 'tag-1', assetId: 'asset-1' },
{ tagId: 'tag-1', assetId: 'asset-2' },
{ tagId: 'tag-1', assetId: 'asset-3' },
{ tagId: 'tag-2', assetId: 'asset-1' },
{ tagId: 'tag-2', assetId: 'asset-2' },
{ tagId: 'tag-2', assetId: 'asset-3' },
]);
await expect(
sut.bulkTagAssets(authStub.admin, { tagIds: ['tag-1', 'tag-2'], assetIds: ['asset-1', 'asset-2', 'asset-3'] }),
@@ -205,12 +205,12 @@ describe(TagService.name, () => {
count: 6,
});
expect(mocks.tag.upsertAssetIds).toHaveBeenCalledWith([
{ tagsId: 'tag-1', assetsId: 'asset-1' },
{ tagsId: 'tag-1', assetsId: 'asset-2' },
{ tagsId: 'tag-1', assetsId: 'asset-3' },
{ tagsId: 'tag-2', assetsId: 'asset-1' },
{ tagsId: 'tag-2', assetsId: 'asset-2' },
{ tagsId: 'tag-2', assetsId: 'asset-3' },
{ tagId: 'tag-1', assetId: 'asset-1' },
{ tagId: 'tag-1', assetId: 'asset-2' },
{ tagId: 'tag-1', assetId: 'asset-3' },
{ tagId: 'tag-2', assetId: 'asset-1' },
{ tagId: 'tag-2', assetId: 'asset-2' },
{ tagId: 'tag-2', assetId: 'asset-3' },
]);
});
});

View File

@@ -82,14 +82,14 @@ export class TagService extends BaseService {
]);
const items: Insertable<TagAssetTable>[] = [];
for (const tagsId of tagIds) {
for (const assetsId of assetIds) {
items.push({ tagsId, assetsId });
for (const tagId of tagIds) {
for (const assetId of assetIds) {
items.push({ tagId, assetId });
}
}
const results = await this.tagRepository.upsertAssetIds(items);
for (const assetId of new Set(results.map((item) => item.assetsId))) {
for (const assetId of new Set(results.map((item) => item.assetId))) {
await this.eventRepository.emit('AssetTag', { assetId });
}

View File

@@ -244,12 +244,12 @@ export function inAlbums<O>(qb: SelectQueryBuilder<DB, 'asset', O>, albumIds: st
(eb) =>
eb
.selectFrom('album_asset')
.select('assetsId')
.where('albumsId', '=', anyUuid(albumIds!))
.groupBy('assetsId')
.having((eb) => eb.fn.count('albumsId').distinct(), '=', albumIds.length)
.select('assetId')
.where('albumId', '=', anyUuid(albumIds!))
.groupBy('assetId')
.having((eb) => eb.fn.count('albumId').distinct(), '=', albumIds.length)
.as('has_album'),
(join) => join.onRef('has_album.assetsId', '=', 'asset.id'),
(join) => join.onRef('has_album.assetId', '=', 'asset.id'),
);
}
@@ -258,13 +258,13 @@ export function hasTags<O>(qb: SelectQueryBuilder<DB, 'asset', O>, tagIds: strin
(eb) =>
eb
.selectFrom('tag_asset')
.select('assetsId')
.innerJoin('tag_closure', 'tag_asset.tagsId', 'tag_closure.id_descendant')
.select('assetId')
.innerJoin('tag_closure', 'tag_asset.tagId', 'tag_closure.id_descendant')
.where('tag_closure.id_ancestor', '=', anyUuid(tagIds))
.groupBy('assetsId')
.groupBy('assetId')
.having((eb) => eb.fn.count('tag_closure.id_ancestor').distinct(), '>=', tagIds.length)
.as('has_tags'),
(join) => join.onRef('has_tags.assetsId', '=', 'asset.id'),
(join) => join.onRef('has_tags.assetId', '=', 'asset.id'),
);
}
@@ -285,8 +285,8 @@ export function withTags(eb: ExpressionBuilder<DB, 'asset'>) {
eb
.selectFrom('tag')
.select(columns.tag)
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('asset.id', '=', 'tag_asset.assetsId'),
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.whereRef('asset.id', '=', 'tag_asset.assetId'),
).as('tags');
}
@@ -299,8 +299,8 @@ export function withTagId<O>(qb: SelectQueryBuilder<DB, 'asset', O>, tagId: stri
eb.exists(
eb
.selectFrom('tag_closure')
.innerJoin('tag_asset', 'tag_asset.tagsId', 'tag_closure.id_descendant')
.whereRef('tag_asset.assetsId', '=', 'asset.id')
.innerJoin('tag_asset', 'tag_asset.tagId', 'tag_closure.id_descendant')
.whereRef('tag_asset.assetId', '=', 'asset.id')
.where('tag_closure.id_ancestor', '=', tagId),
),
);
@@ -320,7 +320,7 @@ export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuild
.$if(!!options.albumIds && options.albumIds.length > 0, (qb) => inAlbums(qb, options.albumIds!))
.$if(!!options.tagIds && options.tagIds.length > 0, (qb) => hasTags(qb, options.tagIds!))
.$if(options.tagIds === null, (qb) =>
qb.where((eb) => eb.not(eb.exists((eb) => eb.selectFrom('tag_asset').whereRef('assetsId', '=', 'asset.id')))),
qb.where((eb) => eb.not(eb.exists((eb) => eb.selectFrom('tag_asset').whereRef('assetId', '=', 'asset.id')))),
)
.$if(!!options.personIds && options.personIds.length > 0, (qb) => hasPeople(qb, options.personIds!))
.$if(!!options.createdBefore, (qb) => qb.where('asset.createdAt', '<=', options.createdBefore!))
@@ -403,7 +403,7 @@ export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuild
qb.where('asset.livePhotoVideoId', options.isMotion ? 'is not' : 'is', null),
)
.$if(!!options.isNotInAlbum && (!options.albumIds || options.albumIds.length === 0), (qb) =>
qb.where((eb) => eb.not(eb.exists((eb) => eb.selectFrom('album_asset').whereRef('assetsId', '=', 'asset.id')))),
qb.where((eb) => eb.not(eb.exists((eb) => eb.selectFrom('album_asset').whereRef('assetId', '=', 'asset.id')))),
)
.$if(!!options.withExif, withExifInner)
.$if(!!(options.withFaces || options.withPeople || options.personIds), (qb) => qb.select(withFacesAndPeople))
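inAlbums above is relational division ("the asset appears in every requested album"): group the join rows per asset and require one distinct album per requested id; hasTags applies the same trick through tag_closure so descendant tags satisfy their ancestors. The division step as a standalone sketch (illustrative; the real query uses anyUuid and is embedded in a lateral join):

// db: Kysely<DB>; albumIds: string[] are assumed in scope
const assetsInAllAlbums = await db
  .selectFrom('album_asset')
  .select('assetId')
  .where('albumId', 'in', albumIds)
  .groupBy('assetId')
  .having((eb) => eb.fn.count('albumId').distinct(), '=', albumIds.length)
  .execute();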

View File

@@ -2,6 +2,7 @@
import { Insertable, Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { createHash, randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { Writable } from 'node:stream';
import { AssetFace } from 'src/database';
import { AuthDto, LoginResponseDto } from 'src/dtos/auth.dto';
@@ -28,7 +29,9 @@ import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { MapRepository } from 'src/repositories/map.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
@@ -61,7 +64,9 @@ import { TagAssetTable } from 'src/schema/tables/tag-asset.table';
import { TagTable } from 'src/schema/tables/tag.table';
import { UserTable } from 'src/schema/tables/user.table';
import { BASE_SERVICE_DEPENDENCIES, BaseService } from 'src/services/base.service';
import { MetadataService } from 'src/services/metadata.service';
import { SyncService } from 'src/services/sync.service';
import { mockEnvData } from 'test/repositories/config.repository.mock';
import { newTelemetryRepositoryMock } from 'test/repositories/telemetry.repository.mock';
import { factory, newDate, newEmbedding, newUuid } from 'test/small.factory';
import { automock, wait } from 'test/utils';
@@ -212,7 +217,7 @@ export class MediumTestContext<S extends BaseService = BaseService> {
async newAlbumUser(dto: { albumId: string; userId: string; role?: AlbumUserRole }) {
const { albumId, userId, role = AlbumUserRole.Editor } = dto;
- const result = await this.get(AlbumUserRepository).create({ albumsId: albumId, usersId: userId, role });
+ const result = await this.get(AlbumUserRepository).create({ albumId, userId, role });
return { albumUser: { albumId, userId, role }, result };
}
@@ -255,9 +260,9 @@ export class MediumTestContext<S extends BaseService = BaseService> {
async newTagAsset(tagBulkAssets: { tagIds: string[]; assetIds: string[] }) {
const tagsAssets: Insertable<TagAssetTable>[] = [];
- for (const tagsId of tagBulkAssets.tagIds) {
- for (const assetsId of tagBulkAssets.assetIds) {
- tagsAssets.push({ tagsId, assetsId });
+ for (const tagId of tagBulkAssets.tagIds) {
+ for (const assetId of tagBulkAssets.assetIds) {
+ tagsAssets.push({ tagId, assetId });
}
}
@@ -305,6 +310,63 @@ export class SyncTestContext extends MediumTestContext<SyncService> {
}
}
const mockDate = new Date('2024-06-01T12:00:00.000Z');
const mockStats = {
mtime: mockDate,
atime: mockDate,
ctime: mockDate,
birthtime: mockDate,
atimeMs: 0,
mtimeMs: 0,
ctimeMs: 0,
birthtimeMs: 0,
};
export class ExifTestContext extends MediumTestContext<MetadataService> {
constructor(database: Kysely<DB>) {
super(MetadataService, {
database,
real: [AssetRepository, AssetJobRepository, MetadataRepository, SystemMetadataRepository, TagRepository],
mock: [ConfigRepository, EventRepository, LoggingRepository, MapRepository, StorageRepository],
});
this.getMock(ConfigRepository).getEnv.mockReturnValue(mockEnvData({}));
this.getMock(EventRepository).emit.mockResolvedValue();
this.getMock(MapRepository).reverseGeocode.mockResolvedValue({ country: null, state: null, city: null });
this.getMock(StorageRepository).stat.mockResolvedValue(mockStats as Stats);
}
getMockStats() {
return mockStats;
}
getGps(assetId: string) {
return this.database
.selectFrom('asset_exif')
.select(['latitude', 'longitude'])
.where('assetId', '=', assetId)
.executeTakeFirstOrThrow();
}
getTags(assetId: string) {
return this.database
.selectFrom('tag')
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.where('tag_asset.assetId', '=', assetId)
.selectAll()
.execute();
}
getDates(assetId: string) {
return this.database
.selectFrom('asset')
.innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.where('id', '=', assetId)
.select(['asset.fileCreatedAt', 'asset.localDateTime', 'asset_exif.dateTimeOriginal', 'asset_exif.timeZone'])
.executeTakeFirstOrThrow();
}
}
const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
switch (key) {
case AccessRepository:
@@ -344,6 +406,14 @@ const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
return new key(LoggingRepository.create());
}
case MetadataRepository: {
return new key(LoggingRepository.create());
}
case StorageRepository: {
return new key(LoggingRepository.create());
}
case TagRepository: {
return new key(db, LoggingRepository.create());
}
@@ -381,6 +451,10 @@ const newMockRepository = <T>(key: ClassConstructor<T>) => {
return automock(key);
}
case MapRepository: {
return automock(MapRepository, { args: [undefined, undefined, { setContext: () => {} }] });
}
case TelemetryRepository: {
return newTelemetryRepositoryMock();
}
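
The new ExifTestContext above runs MetadataService against a real database while stubbing the I/O-heavy edges: config, events, reverse geocoding, and file stats. Pinning StorageRepository.stat to a frozen Stats value is what makes the fallback-date specs below deterministic; a minimal sketch of that interplay (the test body is illustrative, not from this diff):

import { Kysely } from 'kysely';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';

it('exposes a frozen mtime for date-fallback assertions', async () => {
  const database: Kysely<DB> = await getKyselyDB();
  const ctx = new ExifTestContext(database);
  // Every stat() call resolves to the same frozen timestamp, so any asset
  // without a usable EXIF date lands on exactly this value (see the
  // TimeCreated spec below, which asserts it via ctx.getDates()).
  expect(ctx.getMockStats().mtime).toEqual(new Date('2024-06-01T12:00:00.000Z'));
});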


@@ -0,0 +1,65 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { resolve } from 'node:path';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let database: Kysely<DB>;
const setup = async (testAssetPath: string) => {
const ctx = new ExifTestContext(database);
const { user } = await ctx.newUser();
const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
return { ctx, sut: ctx.sut, asset };
};
beforeAll(async () => {
database = await getKyselyDB();
});
describe('exif date time', () => {
it('should prioritize DateTimeOriginal', async () => {
const { ctx, sut, asset } = await setup('metadata/dates/date-priority-test.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
await expect(ctx.getDates(asset.id)).resolves.toEqual({
timeZone: null,
dateTimeOriginal: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
localDateTime: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
fileCreatedAt: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
});
});
it('should extract GPSDateTime with GPS coordinates', async () => {
const { ctx, sut, asset } = await setup('metadata/dates/gps-datetime.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
await expect(ctx.getDates(asset.id)).resolves.toEqual({
timeZone: 'America/Los_Angeles',
dateTimeOriginal: DateTime.fromISO('2023-11-15T12:30:00.000Z').toJSDate(),
localDateTime: DateTime.fromISO('2023-11-15T04:30:00.000Z').toJSDate(),
fileCreatedAt: DateTime.fromISO('2023-11-15T12:30:00.000Z').toJSDate(),
});
});
it('should ignore the TimeCreated tag', async () => {
const { ctx, sut, asset } = await setup('metadata/dates/time-created.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
const stats = ctx.getMockStats();
await expect(ctx.getDates(asset.id)).resolves.toEqual({
timeZone: null,
dateTimeOriginal: stats.mtime,
localDateTime: stats.mtime,
fileCreatedAt: stats.mtime,
});
});
});
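
The GPS case above also pins down how localDateTime relates to the extracted zone: 2023-11-15 falls after the US DST change, so America/Los_Angeles sits at UTC-8 and 12:30 UTC is 04:30 on the wall clock, which is presumably why the expected localDateTime reads 04:30 with a Z suffix. A luxon sketch of that arithmetic (not the service's actual code path):

import { DateTime } from 'luxon';

// The UTC instant from the file, viewed as wall-clock time in the zone
// derived from the GPS coordinates:
const wallClock = DateTime.fromISO('2023-11-15T12:30:00.000Z').setZone('America/Los_Angeles');
console.log(wallClock.toFormat('HH:mm')); // 04:30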


@@ -0,0 +1,31 @@
import { Kysely } from 'kysely';
import { resolve } from 'node:path';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let database: Kysely<DB>;
const setup = async (testAssetPath: string) => {
const ctx = new ExifTestContext(database);
const { user } = await ctx.newUser();
const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
return { ctx, sut: ctx.sut, asset };
};
beforeAll(async () => {
database = await getKyselyDB();
});
describe('exif gps', () => {
it('should handle empty strings', async () => {
const { ctx, sut, asset } = await setup('metadata/gps-position/empty_gps.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
await expect(ctx.getGps(asset.id)).resolves.toEqual({ latitude: null, longitude: null });
});
});
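
The empty-string case exists because exiftool can report GPSLatitude/GPSLongitude as empty strings rather than omitting them, and Number('') coerces to 0, which is a real coordinate. An illustrative guard of the kind this spec exercises (not the service's actual code):

// Treat '', null/undefined, and non-numeric values as "no GPS" instead of 0.
const parseCoordinate = (value: unknown): number | null => {
  if (value === '' || value === null || value === undefined) {
    return null;
  }
  const parsed = Number(value);
  return Number.isFinite(parsed) ? parsed : null;
};

parseCoordinate('');      // null, not 0
parseCoordinate('52.52'); // 52.52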


@@ -0,0 +1,34 @@
import { Kysely } from 'kysely';
import { resolve } from 'node:path';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let database: Kysely<DB>;
const setup = async (testAssetPath: string) => {
const ctx = new ExifTestContext(database);
const { user } = await ctx.newUser();
const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
return { ctx, sut: ctx.sut, asset };
};
beforeAll(async () => {
database = await getKyselyDB();
});
describe('exif tags', () => {
it('should detect regular tags', async () => {
const { ctx, sut, asset } = await setup('metadata/tags/picasa.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
await expect(ctx.getTags(asset.id)).resolves.toEqual([
expect.objectContaining({ assetId: asset.id, value: 'Frost', parentId: null }),
expect.objectContaining({ assetId: asset.id, value: 'Yard', parentId: null }),
]);
});
});
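
Flat Picasa keywords come through with parentId null, as asserted above. For a hierarchical keyword such as Nature/Frost, only the leaf tag would be attached to the asset, with parentId pointing at the implicitly created parent; a hypothetical assertion sketch (fixture, value format, and attachment behavior are all assumptions, not taken from this diff):

// Hypothetical: the leaf of a 'Nature/Frost' keyword is attached, and its
// parentId references the auto-created 'Nature' tag.
await expect(ctx.getTags(asset.id)).resolves.toEqual([
  expect.objectContaining({ value: 'Nature/Frost', parentId: expect.any(String) }),
]);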


@@ -65,42 +65,6 @@ describe(MetadataService.name, () => {
timeZone: null,
},
},
- {
- description: 'should handle no time zone information and server behind UTC',
- serverTimeZone: 'America/Los_Angeles',
- exifData: {
- DateTimeOriginal: '2022:01:01 00:00:00',
- },
- expected: {
- localDateTime: '2022-01-01T00:00:00.000Z',
- dateTimeOriginal: '2022-01-01T08:00:00.000Z',
- timeZone: null,
- },
- },
- {
- description: 'should handle no time zone information and server ahead of UTC',
- serverTimeZone: 'Europe/Brussels',
- exifData: {
- DateTimeOriginal: '2022:01:01 00:00:00',
- },
- expected: {
- localDateTime: '2022-01-01T00:00:00.000Z',
- dateTimeOriginal: '2021-12-31T23:00:00.000Z',
- timeZone: null,
- },
- },
- {
- description: 'should handle no time zone information and server ahead of UTC in the summer',
- serverTimeZone: 'Europe/Brussels',
- exifData: {
- DateTimeOriginal: '2022:06:01 00:00:00',
- },
- expected: {
- localDateTime: '2022-06-01T00:00:00.000Z',
- dateTimeOriginal: '2022-05-31T22:00:00.000Z',
- timeZone: null,
- },
- },
{
description: 'should handle a +13:00 time zone',
exifData: {
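
The three removed cases had pinned down how a zone-less DateTimeOriginal is interpreted in the server's own time zone before being stored as UTC. The arithmetic they encoded is straightforward to reproduce with luxon (a sketch, not the service's actual parsing path):

import { DateTime } from 'luxon';

// Parse a zone-less EXIF timestamp as server-local time, then view it in UTC.
const parseInZone = (exif: string, zone: string) =>
  DateTime.fromFormat(exif, 'yyyy:MM:dd HH:mm:ss', { zone }).toUTC().toISO();

parseInZone('2022:01:01 00:00:00', 'America/Los_Angeles'); // '2022-01-01T08:00:00.000Z'
parseInZone('2022:01:01 00:00:00', 'Europe/Brussels');     // '2021-12-31T23:00:00.000Z'
parseInZone('2022:06:01 00:00:00', 'Europe/Brussels');     // '2022-05-31T22:00:00.000Z' (CEST)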


@@ -74,7 +74,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
- await albumUserRepo.update({ albumsId: album.id, usersId: user1.id }, { role: AlbumUserRole.Viewer });
+ await albumUserRepo.update({ albumId: album.id, userId: user1.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
@@ -104,7 +104,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
- await albumUserRepo.delete({ albumsId: album.id, usersId: user1.id });
+ await albumUserRepo.delete({ albumId: album.id, userId: user1.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
@@ -171,7 +171,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
- await albumUserRepo.update({ albumsId: album.id, usersId: user.id }, { role: AlbumUserRole.Viewer });
+ await albumUserRepo.update({ albumId: album.id, userId: user.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
@@ -208,7 +208,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
- await albumUserRepo.delete({ albumsId: album.id, usersId: user.id });
+ await albumUserRepo.delete({ albumId: album.id, userId: user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([


@@ -217,7 +217,7 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
- await albumUserRepo.delete({ albumsId: album.id, usersId: auth.user.id });
+ await albumUserRepo.delete({ albumId: album.id, userId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(newResponse).toEqual([
{

Some files were not shown because too many files have changed in this diff.