Mirror of https://github.com/immich-app/immich.git (synced 2025-12-06 21:01:24 -08:00)

Compare commits: refactor/v...chore-full (67 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
|  | a147d9c9c0 |  |
|  | e8e9e7830e |  |
|  | 4fd9e42ce5 |  |
|  | 337e3a8dac |  |
|  | 2dc81e28fc |  |
|  | f915d4cc90 |  |
|  | 905f4375b0 |  |
|  | 0b3633db4f |  |
|  | 2f40f5aad8 |  |
|  | 2611e2ec20 |  |
|  | 433a3cd339 |  |
|  | 0b487897a4 |  |
|  | d5c5bdffcb |  |
|  | dea95ac2e6 |  |
|  | 9e2208b8dd |  |
|  | 6922a92b69 |  |
|  | 7a2c8e0662 |  |
|  | 787158247f |  |
|  | b0a0b7c2e1 |  |
|  | cb6d81771d |  |
|  | 8de6ec1a1b |  |
|  | d27c01ef70 |  |
|  | d6307b262f |  |
|  | b2cbefe41e |  |
|  | da5a72f6de |  |
|  | 45304f1211 |  |
|  | a4e65a7ea8 |  |
|  | dd393c8346 |  |
|  | 493cde9d55 |  |
|  | 7705c84b04 |  |
|  | ce0172b8c1 |  |
|  | 718b3a7b52 |  |
|  | 8a73de018c |  |
|  | d92df63f84 |  |
|  | 6c6b00067b |  |
|  | 9cc88ed2a6 |  |
|  | 4905bba694 |  |
|  | 853d19dc2d |  |
|  | c935ae47d0 |  |
|  | 93ab42fa24 |  |
|  | 6913697ad1 |  |
|  | a4ae86ce29 |  |
|  | 2c50f2e244 |  |
|  | 365abd8906 |  |
|  | 25fb43bbe3 |  |
|  | 125e8cee01 |  |
|  | c15e9bfa72 |  |
|  | 35e188e6e7 |  |
|  | 3cc9dd126c |  |
|  | aa69d89b9f |  |
|  | 29c14a3f58 |  |
|  | 0df70365d7 |  |
|  | c34be73d81 |  |
|  | f396e9e374 |  |
|  | 821a9d4691 |  |
|  | cad654586f |  |
|  | 28eb1bc13c |  |
|  | 1e4779cf48 |  |
|  | 0647c22956 |  |
|  | b8087b4fa2 |  |
|  | d94cb9641b |  |
|  | 517c3e1d4c |  |
|  | 619de2a5e4 |  |
|  | 79d0e3e1ed |  |
|  | f5ff36a1f8 |  |
|  | b5efc9c16e |  |
|  | 1036076b0d |  |
.github/mise.toml (new file, 10 lines)

@@ -0,0 +1,10 @@
+[tasks.install]
+run = "pnpm install --filter github --frozen-lockfile"
+
+[tasks.format]
+env._.path = "./node_modules/.bin"
+run = "prettier --check ."
+
+[tasks."format-fix"]
+env._.path = "./node_modules/.bin"
+run = "prettier --write ."
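The tasks in these mise files are meant to be invoked from the directory that owns the file. A minimal sketch of local usage, assuming mise and pnpm are already installed:

```sh
cd .github
mise run install      # pnpm install --filter github --frozen-lockfile
mise run format       # prettier --check ., resolved from ./node_modules/.bin
mise run format-fix   # prettier --write .
```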
.github/workflows/build-mobile.yml (28 changes)

@@ -20,6 +20,30 @@ on:
 required: true
 ANDROID_STORE_PASSWORD:
 required: true
+APP_STORE_CONNECT_API_KEY_ID:
+required: true
+APP_STORE_CONNECT_API_KEY_ISSUER_ID:
+required: true
+APP_STORE_CONNECT_API_KEY:
+required: true
+IOS_CERTIFICATE_P12:
+required: true
+IOS_CERTIFICATE_PASSWORD:
+required: true
+IOS_PROVISIONING_PROFILE:
+required: true
+IOS_PROVISIONING_PROFILE_SHARE_EXTENSION:
+required: true
+IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION:
+required: true
+IOS_DEVELOPMENT_PROVISIONING_PROFILE:
+required: true
+IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION:
+required: true
+IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION:
+required: true
+FASTLANE_TEAM_ID:
+required: true
 pull_request:
 push:
 branches: [main]

@@ -141,7 +165,7 @@ jobs:
 fi

 - name: Publish Android Artifact
-uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
 with:
 name: release-apk-signed
 path: mobile/build/app/outputs/flutter-apk/*.apk

@@ -293,7 +317,7 @@ jobs:
 security delete-keychain build.keychain || true

 - name: Upload IPA artifact
-uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
 with:
 name: ios-release-ipa
 path: mobile/ios/Runner.ipa
.github/workflows/cli.yml (4 changes)

@@ -84,7 +84,7 @@ jobs:
 token: ${{ steps.token.outputs.token }}

 - name: Set up QEMU
-uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0

 - name: Set up Docker Buildx
 uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

@@ -105,7 +105,7 @@

 - name: Generate docker image tags
 id: metadata
-uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
 with:
 flavor: |
 latest=false
.github/workflows/close-duplicates.yml (2 changes)

@@ -35,7 +35,7 @@ jobs:
 needs: [get_body, should_run]
 if: ${{ needs.should_run.outputs.should_run == 'true' }}
 container:
-image: ghcr.io/immich-app/mdq:main@sha256:6b8450bfc06770af1af66bce9bf2ced7d1d9b90df1a59fc4c83a17777a9f6723
+image: ghcr.io/immich-app/mdq:main@sha256:9c905a4ff69f00c4b2f98b40b6090ab3ab18d1a15ed1379733b8691aa1fcb271
 outputs:
 checked: ${{ steps.get_checkbox.outputs.checked }}
 steps:
.github/workflows/codeql-analysis.yml (6 changes)

@@ -57,7 +57,7 @@ jobs:

 # Initializes the CodeQL tools for scanning.
 - name: Initialize CodeQL
-uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
+uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
 with:
 languages: ${{ matrix.language }}
 # If you wish to specify custom queries, you can do so here or in a config file.

@@ -70,7 +70,7 @@
 # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
 # If this step fails, then you should remove it and run the build manually (see below)
 - name: Autobuild
-uses: github/codeql-action/autobuild@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
+uses: github/codeql-action/autobuild@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2

 # ℹ️ Command-line programs to run using the OS shell.
 # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

@@ -83,6 +83,6 @@
 # ./location_of_script_within_repo/buildscript.sh

 - name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
+uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
 with:
 category: '/language:${{matrix.language}}'
.github/workflows/docs-build.yml (2 changes)

@@ -85,7 +85,7 @@ jobs:
 run: pnpm build

 - name: Upload build output
-uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
 with:
 name: docs-build-output
 path: docs/build/
.github/workflows/docs-deploy.yml (6 changes)

@@ -174,7 +174,7 @@ jobs:
 CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
 TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
 working-directory: 'deployment/modules/cloudflare/docs'
-run: 'mise run tf apply'
+run: 'mise run //deployment:tf apply'

 - name: Deploy Docs Subdomain Output
 id: docs-output

@@ -186,7 +186,7 @@
 TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
 working-directory: 'deployment/modules/cloudflare/docs'
 run: |
-mise run tf output -- -json | jq -r '
+mise run //deployment:tf output -- -json | jq -r '
 "projectName=\(.pages_project_name.value)",
 "subdomain=\(.immich_app_branch_subdomain.value)"
 ' >> $GITHUB_OUTPUT

@@ -211,7 +211,7 @@
 CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
 TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
 working-directory: 'deployment/modules/cloudflare/docs-release'
-run: 'mise run tf apply'
+run: 'mise run //deployment:tf apply'

 - name: Comment
 uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
.github/workflows/docs-destroy.yml (2 changes)

@@ -39,7 +39,7 @@ jobs:
 CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
 TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
 working-directory: 'deployment/modules/cloudflare/docs'
-run: 'mise run tf destroy -- -refresh=false'
+run: 'mise run //deployment:tf destroy -- -refresh=false'

 - name: Comment
 uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
.github/workflows/prepare-release.yml (18 changes)

@@ -62,7 +62,7 @@ jobs:
 ref: main

 - name: Install uv
-uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # v7.1.1
+uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

 - name: Setup pnpm
 uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

@@ -99,6 +99,20 @@
 ALIAS: ${{ secrets.ALIAS }}
 ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
 ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
+# iOS secrets
+APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
+APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
+APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
+IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
+IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
+IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
+IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
+IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
+IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
+IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
+IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
+FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}

 with:
 ref: ${{ needs.bump_version.outputs.ref }}
 environment: production

@@ -124,7 +138,7 @@
 persist-credentials: false

 - name: Download APK
-uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
 with:
 name: release-apk-signed
 github-token: ${{ steps.generate-token.outputs.token }}
.github/workflows/test.yml (3 changes)

@@ -382,6 +382,7 @@ jobs:
 uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 with:
 persist-credentials: false
+submodules: 'recursive'
 token: ${{ steps.token.outputs.token }}
 - name: Setup pnpm
 uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

@@ -562,7 +563,7 @@ jobs:
 persist-credentials: false
 token: ${{ steps.token.outputs.token }}
 - name: Install uv
-uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # v7.1.1
+uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
 - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
 # TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
 # with:
cli/mise.toml (new file, 29 lines)

@@ -0,0 +1,29 @@
+[tasks.install]
+run = "pnpm install --filter @immich/cli --frozen-lockfile"
+
+[tasks.build]
+env._.path = "./node_modules/.bin"
+run = "vite build"
+
+[tasks.test]
+env._.path = "./node_modules/.bin"
+run = "vite"
+
+[tasks.lint]
+env._.path = "./node_modules/.bin"
+run = "eslint \"src/**/*.ts\" --max-warnings 0"
+
+[tasks."lint-fix"]
+run = { task = "lint --fix" }
+
+[tasks.format]
+env._.path = "./node_modules/.bin"
+run = "prettier --check ."
+
+[tasks."format-fix"]
+env._.path = "./node_modules/.bin"
+run = "prettier --write ."
+
+[tasks.check]
+env._.path = "./node_modules/.bin"
+run = "tsc --noEmit"
@@ -1,6 +1,6 @@
 {
 "name": "@immich/cli",
-"version": "2.2.99",
+"version": "2.2.101",
 "description": "Command Line Interface (CLI) for Immich",
 "type": "module",
 "exports": "./dist/index.js",

@@ -20,7 +20,7 @@
 "@types/lodash-es": "^4.17.12",
 "@types/micromatch": "^4.0.9",
 "@types/mock-fs": "^4.13.1",
-"@types/node": "^22.18.12",
+"@types/node": "^22.19.0",
 "@vitest/coverage-v8": "^3.0.0",
 "byte-size": "^9.0.0",
 "cli-progress": "^3.12.0",
deployment/mise.toml (new file, 20 lines)

@@ -0,0 +1,20 @@
+[tools]
+terragrunt = "0.91.2"
+opentofu = "1.10.6"
+
+[tasks."tg:fmt"]
+run = "terragrunt hclfmt"
+description = "Format terragrunt files"
+
+[tasks.tf]
+run = "terragrunt run --all"
+description = "Wrapper for terragrunt run-all"
+dir = "{{cwd}}"
+
+[tasks."tf:fmt"]
+run = "tofu fmt -recursive tf/"
+description = "Format terraform files"
+
+[tasks."tf:init"]
+run = { task = "tf init -- -reconfigure" }
+dir = "{{cwd}}"
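The `tf` task above is the terragrunt wrapper that the docs-deploy and docs-destroy workflows now call through a monorepo task path. A sketch of the equivalent local run, assuming mise trusts the repository and the `CLOUDFLARE_*` and `TF_STATE_POSTGRES_CONN_STR` variables are exported:

```sh
cd deployment/modules/cloudflare/docs
mise run //deployment:tf plan       # terragrunt run --all plan, scoped to this module directory
mise run //deployment:tf apply      # the same invocation docs-deploy.yml uses
mise run //deployment:tf output -- -json | jq -r '.pages_project_name.value'
```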
@@ -83,7 +83,7 @@ services:
 container_name: immich_prometheus
 ports:
 - 9090:9090
-image: prom/prometheus@sha256:23031bfe0e74a13004252caaa74eccd0d62b6c6e7a04711d5b8bf5b7e113adc7
+image: prom/prometheus@sha256:49214755b6153f90a597adcbff0252cc61069f8ab69ce8411285cd4a560e8038
 volumes:
 - ./prometheus.yml:/etc/prometheus/prometheus.yml
 - prometheus-data:/prometheus
@@ -10,16 +10,19 @@ Running with a pre-existing Postgres server can unlock powerful administrative f

 ## Prerequisites

-You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
+You must install pgvector as it is a prerequisite for VectorChord.
+The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
+running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).

 You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.

-:::note
-Immich is known to work with Postgres versions `>= 14, < 18`.
+:::note Supported versions
+Immich is known to work with Postgres versions `>= 14, < 19`.

-Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.5.0`.
+VectorChord is known to work with pgvector versions `>= 0.7, < 0.9`.

 The Immich server will check the VectorChord version on startup to ensure compatibility, and refuse to start if a compatible version is not found.
+The current accepted range for VectorChord is `>= 0.3, < 0.6`.
 :::

 ## Specifying the connection URL
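The prerequisite steps above can be scripted. A rough sketch for Debian/Ubuntu, assuming Postgres 16, the default cluster paths, and a database named `immich` (adjust versions and names to your setup):

```sh
# Install pgvector from the PostgreSQL Apt repository (prerequisite for VectorChord)
sudo apt install postgresql-16-pgvector

# Install VectorChord per its upstream instructions, then preload it and restart Postgres
echo "shared_preload_libraries = 'vchord.so'" | sudo tee -a /etc/postgresql/16/main/postgresql.conf
sudo systemctl restart postgresql

# Enable the extension (CASCADE pulls in pgvector) in the Immich database
sudo -u postgres psql -d immich -c "CREATE EXTENSION IF NOT EXISTS vchord CASCADE;"
```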
@@ -106,14 +106,14 @@ SELECT "user"."email", "asset"."type", COUNT(*) FROM "asset"

 ```sql title="Count by tag"
 SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
-JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
+JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
 WHERE "a"."visibility" != 'hidden'
 GROUP BY "t"."value" ORDER BY "number_assets" DESC;
 ```

 ```sql title="Count by tag (per user)"
 SELECT "t"."value" AS "tag_name", "u"."email" as "user_email", COUNT(*) AS "number_assets" FROM "tag" "t"
-JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
+JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
 WHERE "a"."visibility" != 'hidden'
 GROUP BY "t"."value", "u"."email" ORDER BY "number_assets" DESC;
 ```
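For reference, one way to run these administrative queries from the shell, assuming the bundled `immich_postgres` container and the default `postgres`/`immich` credentials (adjust to your deployment):

```sh
docker exec -i immich_postgres psql -U postgres -d immich <<'SQL'
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
SQL
```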
@@ -16,48 +16,76 @@ The default configuration looks like this:
|
||||
|
||||
```json
|
||||
{
|
||||
"ffmpeg": {
|
||||
"crf": 23,
|
||||
"threads": 0,
|
||||
"preset": "ultrafast",
|
||||
"targetVideoCodec": "h264",
|
||||
"acceptedVideoCodecs": ["h264"],
|
||||
"targetAudioCodec": "aac",
|
||||
"acceptedAudioCodecs": ["aac", "mp3", "libopus", "pcm_s16le"],
|
||||
"acceptedContainers": ["mov", "ogg", "webm"],
|
||||
"targetResolution": "720",
|
||||
"maxBitrate": "0",
|
||||
"bframes": -1,
|
||||
"refs": 0,
|
||||
"gopSize": 0,
|
||||
"temporalAQ": false,
|
||||
"cqMode": "auto",
|
||||
"twoPass": false,
|
||||
"preferredHwDevice": "auto",
|
||||
"transcode": "required",
|
||||
"tonemap": "hable",
|
||||
"accel": "disabled",
|
||||
"accelDecode": false
|
||||
},
|
||||
"backup": {
|
||||
"database": {
|
||||
"enabled": true,
|
||||
"cronExpression": "0 02 * * *",
|
||||
"enabled": true,
|
||||
"keepLastAmount": 14
|
||||
}
|
||||
},
|
||||
"ffmpeg": {
|
||||
"accel": "disabled",
|
||||
"accelDecode": false,
|
||||
"acceptedAudioCodecs": ["aac", "mp3", "libopus"],
|
||||
"acceptedContainers": ["mov", "ogg", "webm"],
|
||||
"acceptedVideoCodecs": ["h264"],
|
||||
"bframes": -1,
|
||||
"cqMode": "auto",
|
||||
"crf": 23,
|
||||
"gopSize": 0,
|
||||
"maxBitrate": "0",
|
||||
"preferredHwDevice": "auto",
|
||||
"preset": "ultrafast",
|
||||
"refs": 0,
|
||||
"targetAudioCodec": "aac",
|
||||
"targetResolution": "720",
|
||||
"targetVideoCodec": "h264",
|
||||
"temporalAQ": false,
|
||||
"threads": 0,
|
||||
"tonemap": "hable",
|
||||
"transcode": "required",
|
||||
"twoPass": false
|
||||
},
|
||||
"image": {
|
||||
"colorspace": "p3",
|
||||
"extractEmbedded": false,
|
||||
"fullsize": {
|
||||
"enabled": false,
|
||||
"format": "jpeg",
|
||||
"quality": 80
|
||||
},
|
||||
"preview": {
|
||||
"format": "jpeg",
|
||||
"quality": 80,
|
||||
"size": 1440
|
||||
},
|
||||
"thumbnail": {
|
||||
"format": "webp",
|
||||
"quality": 80,
|
||||
"size": 250
|
||||
}
|
||||
},
|
||||
"job": {
|
||||
"backgroundTask": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"smartSearch": {
|
||||
"faceDetection": {
|
||||
"concurrency": 2
|
||||
},
|
||||
"library": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"metadataExtraction": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"faceDetection": {
|
||||
"concurrency": 2
|
||||
"migration": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"notifications": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"ocr": {
|
||||
"concurrency": 1
|
||||
},
|
||||
"search": {
|
||||
"concurrency": 5
|
||||
@@ -65,20 +93,23 @@ The default configuration looks like this:
|
||||
"sidecar": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"library": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"migration": {
|
||||
"concurrency": 5
|
||||
"smartSearch": {
|
||||
"concurrency": 2
|
||||
},
|
||||
"thumbnailGeneration": {
|
||||
"concurrency": 3
|
||||
},
|
||||
"videoConversion": {
|
||||
"concurrency": 1
|
||||
}
|
||||
},
|
||||
"library": {
|
||||
"scan": {
|
||||
"cronExpression": "0 0 * * *",
|
||||
"enabled": true
|
||||
},
|
||||
"notifications": {
|
||||
"concurrency": 5
|
||||
"watch": {
|
||||
"enabled": false
|
||||
}
|
||||
},
|
||||
"logging": {
|
||||
@@ -86,8 +117,11 @@ The default configuration looks like this:
|
||||
"level": "log"
|
||||
},
|
||||
"machineLearning": {
|
||||
"enabled": true,
|
||||
"urls": ["http://immich-machine-learning:3003"],
|
||||
"availabilityChecks": {
|
||||
"enabled": true,
|
||||
"interval": 30000,
|
||||
"timeout": 2000
|
||||
},
|
||||
"clip": {
|
||||
"enabled": true,
|
||||
"modelName": "ViT-B-32__openai"
|
||||
@@ -96,27 +130,59 @@ The default configuration looks like this:
|
||||
"enabled": true,
|
||||
"maxDistance": 0.01
|
||||
},
|
||||
"enabled": true,
|
||||
"facialRecognition": {
|
||||
"enabled": true,
|
||||
"modelName": "buffalo_l",
|
||||
"minScore": 0.7,
|
||||
"maxDistance": 0.5,
|
||||
"minFaces": 3
|
||||
}
|
||||
"minFaces": 3,
|
||||
"minScore": 0.7,
|
||||
"modelName": "buffalo_l"
|
||||
},
|
||||
"ocr": {
|
||||
"enabled": true,
|
||||
"maxResolution": 736,
|
||||
"minDetectionScore": 0.5,
|
||||
"minRecognitionScore": 0.8,
|
||||
"modelName": "PP-OCRv5_mobile"
|
||||
},
|
||||
"urls": ["http://immich-machine-learning:3003"]
|
||||
},
|
||||
"map": {
|
||||
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json",
|
||||
"enabled": true,
|
||||
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json",
|
||||
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json"
|
||||
},
|
||||
"reverseGeocoding": {
|
||||
"enabled": true
|
||||
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json"
|
||||
},
|
||||
"metadata": {
|
||||
"faces": {
|
||||
"import": false
|
||||
}
|
||||
},
|
||||
"newVersionCheck": {
|
||||
"enabled": true
|
||||
},
|
||||
"nightlyTasks": {
|
||||
"clusterNewFaces": true,
|
||||
"databaseCleanup": true,
|
||||
"generateMemories": true,
|
||||
"missingThumbnails": true,
|
||||
"startTime": "00:00",
|
||||
"syncQuotaUsage": true
|
||||
},
|
||||
"notifications": {
|
||||
"smtp": {
|
||||
"enabled": false,
|
||||
"from": "",
|
||||
"replyTo": "",
|
||||
"transport": {
|
||||
"host": "",
|
||||
"ignoreCert": false,
|
||||
"password": "",
|
||||
"port": 587,
|
||||
"secure": false,
|
||||
"username": ""
|
||||
}
|
||||
}
|
||||
},
|
||||
"oauth": {
|
||||
"autoLaunch": false,
|
||||
"autoRegister": true,
|
||||
@@ -128,70 +194,44 @@ The default configuration looks like this:
|
||||
"issuerUrl": "",
|
||||
"mobileOverrideEnabled": false,
|
||||
"mobileRedirectUri": "",
|
||||
"profileSigningAlgorithm": "none",
|
||||
"roleClaim": "immich_role",
|
||||
"scope": "openid email profile",
|
||||
"signingAlgorithm": "RS256",
|
||||
"profileSigningAlgorithm": "none",
|
||||
"storageLabelClaim": "preferred_username",
|
||||
"storageQuotaClaim": "immich_quota"
|
||||
"storageQuotaClaim": "immich_quota",
|
||||
"timeout": 30000,
|
||||
"tokenEndpointAuthMethod": "client_secret_post"
|
||||
},
|
||||
"passwordLogin": {
|
||||
"enabled": true
|
||||
},
|
||||
"reverseGeocoding": {
|
||||
"enabled": true
|
||||
},
|
||||
"server": {
|
||||
"externalDomain": "",
|
||||
"loginPageMessage": "",
|
||||
"publicUsers": true
|
||||
},
|
||||
"storageTemplate": {
|
||||
"enabled": false,
|
||||
"hashVerificationEnabled": true,
|
||||
"template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
|
||||
},
|
||||
"image": {
|
||||
"thumbnail": {
|
||||
"format": "webp",
|
||||
"size": 250,
|
||||
"quality": 80
|
||||
},
|
||||
"preview": {
|
||||
"format": "jpeg",
|
||||
"size": 1440,
|
||||
"quality": 80
|
||||
},
|
||||
"colorspace": "p3",
|
||||
"extractEmbedded": false
|
||||
},
|
||||
"newVersionCheck": {
|
||||
"enabled": true
|
||||
},
|
||||
"trash": {
|
||||
"enabled": true,
|
||||
"days": 30
|
||||
"templates": {
|
||||
"email": {
|
||||
"albumInviteTemplate": "",
|
||||
"albumUpdateTemplate": "",
|
||||
"welcomeTemplate": ""
|
||||
}
|
||||
},
|
||||
"theme": {
|
||||
"customCss": ""
|
||||
},
|
||||
"library": {
|
||||
"scan": {
|
||||
"enabled": true,
|
||||
"cronExpression": "0 0 * * *"
|
||||
},
|
||||
"watch": {
|
||||
"enabled": false
|
||||
}
|
||||
},
|
||||
"server": {
|
||||
"externalDomain": "",
|
||||
"loginPageMessage": ""
|
||||
},
|
||||
"notifications": {
|
||||
"smtp": {
|
||||
"enabled": false,
|
||||
"from": "",
|
||||
"replyTo": "",
|
||||
"transport": {
|
||||
"ignoreCert": false,
|
||||
"host": "",
|
||||
"port": 587,
|
||||
"username": "",
|
||||
"password": ""
|
||||
}
|
||||
}
|
||||
"trash": {
|
||||
"days": 30,
|
||||
"enabled": true
|
||||
},
|
||||
"user": {
|
||||
"deleteDelay": 7
|
||||
|
||||
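When Immich is configured from a file rather than the admin UI, a JSON document with the shape documented above is mounted into the server container and referenced by `IMMICH_CONFIG_FILE`. A rough sketch (paths and image tag are illustrative):

```sh
docker run --rm \
  -e IMMICH_CONFIG_FILE=/config/immich.json \
  -v ./immich.json:/config/immich.json:ro \
  ghcr.io/immich-app/immich-server:release
```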
@@ -149,29 +149,31 @@ Redis (Sentinel) URL example JSON before encoding:
|
||||
|
||||
## Machine Learning
|
||||
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
|
||||
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
|
||||
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
|
||||
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
|
||||
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
|
||||
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
|
||||
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spinned up while inferencing. | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------------------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
|
||||
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
|
||||
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
|
||||
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
|
||||
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
|
||||
| `MACHINE_LEARNING_MAX_BATCH_SIZE__OCR` | Set the maximum number of boxes that will be processed at once by the OCR model | `6` | machine learning |
|
||||
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
|
||||
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spun up while inferencing. | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
|
||||
| `MACHINE_LEARNING_OPENVINO_PRECISION` | If set to FP16, uses half-precision floating-point operations for faster inference with reduced accuracy (one of [`FP16`, `FP32`], applies only to OpenVINO) | `FP32` | machine learning |
|
||||
|
||||
\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
|
||||
|
||||
|
||||
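Most of these settings are plain environment variables on the machine-learning container. A brief sketch of overriding a few of them (the values shown are illustrative, not recommendations):

```sh
docker run --rm \
  -e MACHINE_LEARNING_MODEL_TTL=600 \
  -e MACHINE_LEARNING_WORKERS=2 \
  -e MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL=ViT-B-32__openai \
  ghcr.io/immich-app/immich-machine-learning:release
```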
docs/mise.toml (new file, 25 lines)

@@ -0,0 +1,25 @@
+[tasks.install]
+run = "pnpm install --filter documentation --frozen-lockfile"
+
+[tasks.start]
+env._.path = "./node_modules/.bin"
+run = "docusaurus --port 3005"
+
+[tasks.build]
+env._.path = "./node_modules/.bin"
+run = [
+  "jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
+  "docusaurus build",
+]
+
+[tasks.preview]
+env._.path = "./node_modules/.bin"
+run = "docusaurus serve"
+
+[tasks.format]
+env._.path = "./node_modules/.bin"
+run = "prettier --check ."
+
+[tasks."format-fix"]
+env._.path = "./node_modules/.bin"
+run = "prettier --write ."
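A plausible local documentation loop built from these tasks (the port and the OpenAPI copy step come straight from the definitions above; assumes mise and pnpm are installed):

```sh
cd docs
mise run install   # pnpm install --filter documentation --frozen-lockfile
mise run start     # Docusaurus dev server on port 3005
mise run build     # copy ../open-api/immich-openapi-specs.json to static/openapi.json, then build
mise run preview   # serve the built site
```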
docs/static/archived-versions.json (8 changes)

@@ -1,4 +1,12 @@
 [
+{
+"label": "v2.2.3",
+"url": "https://docs.v2.2.3.archive.immich.app"
+},
+{
+"label": "v2.2.2",
+"url": "https://docs.v2.2.2.archive.immich.app"
+},
 {
 "label": "v2.2.1",
 "url": "https://docs.v2.2.1.archive.immich.app"
@@ -35,7 +35,7 @@ services:
 - 2285:2285

 redis:
-image: redis:6.2-alpine@sha256:77697a75da9f94e9357b61fcaf8345f69e3d9d32e9d15032c8415c21263977dc
+image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb

 database:
 image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338
e2e/mise.toml (new file, 29 lines)

@@ -0,0 +1,29 @@
+[tasks.install]
+run = "pnpm install --filter immich-e2e --frozen-lockfile"
+
+[tasks.test]
+env._.path = "./node_modules/.bin"
+run = "vitest --run"
+
+[tasks."test-web"]
+env._.path = "./node_modules/.bin"
+run = "playwright test"
+
+[tasks.format]
+env._.path = "./node_modules/.bin"
+run = "prettier --check ."
+
+[tasks."format-fix"]
+env._.path = "./node_modules/.bin"
+run = "prettier --write ."
+
+[tasks.lint]
+env._.path = "./node_modules/.bin"
+run = "eslint \"src/**/*.ts\" --max-warnings 0"
+
+[tasks."lint-fix"]
+run = { task = "lint --fix" }
+
+[tasks.check]
+env._.path = "./node_modules/.bin"
+run = "tsc --noEmit"
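The two test tasks split the suites between runners: vitest drives the API tests and Playwright the web tests. A sketch of a local run, assuming the e2e compose stack above is already up:

```sh
cd e2e
mise run install    # pnpm install --filter immich-e2e --frozen-lockfile
mise run test       # API end-to-end tests (vitest --run)
mise run test-web   # browser end-to-end tests (playwright test)
```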
@@ -1,6 +1,6 @@
 {
 "name": "immich-e2e",
-"version": "2.2.1",
+"version": "2.2.3",
 "description": "",
 "main": "index.js",
 "type": "module",

@@ -25,7 +25,7 @@
 "@playwright/test": "^1.44.1",
 "@socket.io/component-emitter": "^3.1.2",
 "@types/luxon": "^3.4.2",
-"@types/node": "^22.18.12",
+"@types/node": "^22.19.0",
 "@types/oidc-provider": "^9.0.0",
 "@types/pg": "^8.15.1",
 "@types/pngjs": "^6.0.4",
@@ -15,7 +15,6 @@ import { DateTime } from 'luxon';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { basename, join } from 'node:path';
|
||||
import sharp from 'sharp';
|
||||
import { Socket } from 'socket.io-client';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { makeRandomImage } from 'src/generators';
|
||||
@@ -41,40 +40,6 @@ const today = DateTime.fromObject({
|
||||
}) as DateTime<true>;
|
||||
const yesterday = today.minus({ days: 1 });
|
||||
|
||||
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
|
||||
// Generate unique color to ensure different checksums for each image
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
// Create a 100x100 solid color JPEG using Sharp
|
||||
const imageBytes = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
// Add random suffix to filename to avoid collisions
|
||||
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
|
||||
const filepath = join(tempDir, uniqueFilename);
|
||||
await writeFile(filepath, imageBytes);
|
||||
|
||||
// Filter out undefined values before writing EXIF
|
||||
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
|
||||
|
||||
await exiftool.write(filepath, cleanExifData);
|
||||
|
||||
// Re-read the image bytes after EXIF has been written
|
||||
const finalImageBytes = await readFile(filepath);
|
||||
|
||||
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
|
||||
};
|
||||
|
||||
describe('/asset', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let websocket: Socket;
|
||||
@@ -1249,411 +1214,6 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('EXIF metadata extraction', () => {
|
||||
describe('Additional date tag extraction', () => {
|
||||
describe('Date-time vs time-only tag handling', () => {
|
||||
it('should fall back to file timestamps when only time-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
|
||||
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
|
||||
// Exclude all date-time tags to force fallback to file timestamps
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
SonyDateTime2: undefined,
|
||||
GPSDateStamp: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should prefer DateTimeOriginal over time-only tags', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
|
||||
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
|
||||
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal, not TimeCreated
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateTime tag extraction', () => {
|
||||
it('should extract GPSDateTime with GPS coordinates', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
|
||||
GPSDateTime: '2023:11:15 12:30:00Z',
|
||||
GPSLatitude: 37.7749,
|
||||
GPSLongitude: -122.4194,
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T12:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('CreateDate tag extraction', () => {
|
||||
it('should extract CreateDate when available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
|
||||
CreateDate: '2023:11:15 10:30:00',
|
||||
// Exclude other higher priority date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T10:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateStamp tag extraction', () => {
|
||||
it('should fall back to file timestamps when only date-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
|
||||
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
|
||||
GPSLatitude: 51.5074,
|
||||
GPSLongitude: -0.1278,
|
||||
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
|
||||
DateTimeOriginal: undefined,
|
||||
CreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
/*
|
||||
* NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
|
||||
*
|
||||
* NOT WRITABLE to JPEG:
|
||||
* - MediaCreateDate: Can be read from video files but not written to JPEG
|
||||
* - DateTimeCreated: Read-only tag in JPEG format
|
||||
* - DateTimeUTC: Cannot be written to JPEG files
|
||||
* - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
|
||||
* - SubSecMediaCreateDate: Tag not defined for JPEG format
|
||||
* - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
|
||||
*
|
||||
* WRITABLE but NOT READABLE from JPEG:
|
||||
* - SubSecDateTimeOriginal: Can be written but not read back from JPEG
|
||||
* - SubSecCreateDate: Can be written but not read back from JPEG
|
||||
*
|
||||
* EFFECTIVELY TESTABLE TAGS (writable and readable):
|
||||
* - DateTimeOriginal ✓
|
||||
* - CreateDate ✓
|
||||
* - CreationDate ✓
|
||||
* - GPSDateTime ✓
|
||||
*
|
||||
* The metadata service correctly handles non-readable tags and will fall back to
|
||||
* file timestamps when only non-readable tags are present.
|
||||
*/
|
||||
|
||||
describe('Date tag priority order', () => {
|
||||
it('should respect the complete date tag priority order', async () => {
|
||||
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
|
||||
const testCases = [
|
||||
{
|
||||
name: 'DateTimeOriginal has highest priority among testable tags',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-04-04T04:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreationDate when DateTimeOriginal missing',
|
||||
exifData: {
|
||||
CreationDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreateDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-05-05T05:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreationDate when standard EXIF tags missing',
|
||||
exifData: {
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-07-07T07:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'GPSDateTime when no other testable date tags present',
|
||||
exifData: {
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
Make: 'SONY',
|
||||
},
|
||||
expectedDate: '2023-10-10T10:00:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const { imageBytes, filename } = await createTestImageWithExif(
|
||||
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
|
||||
testCase.exifData,
|
||||
);
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
|
||||
expect(
|
||||
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
|
||||
`Date mismatch for: ${testCase.name}`,
|
||||
).toBe(new Date(testCase.expectedDate).getTime());
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases for date tag handling', () => {
|
||||
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
|
||||
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Intentionally no GPSTimeStamp
|
||||
// Exclude all other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle all testable date tags present to verify complete priority order', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
|
||||
// All TESTABLE date tags to JPEG format (writable AND readable)
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
// Note: Excluded non-testable tags:
|
||||
// SubSec tags: writable but not readable from JPEG
|
||||
// Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
|
||||
// Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal as it has the highest priority among testable tags
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-04-04T04:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use CreationDate when SubSec tags are missing', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
|
||||
CreationDate: '2023:07:07 07:00:00', // WRITABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
|
||||
// Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
|
||||
// Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
|
||||
// Exclude SubSec and standard EXIF tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use CreationDate when available
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-07-07T07:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip invalid date formats and use next valid tag', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
|
||||
// Note: Testing invalid date handling with only WRITABLE tags
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
|
||||
CreationDate: '2023:13:13 13:00:00', // WRITABLE - Valid date
|
||||
// Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should skip invalid dates and use the first valid one (GPSDateTime)
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /assets/exist', () => {
|
||||
it('ignores invalid deviceAssetIds', async () => {
|
||||
const response = await utils.checkExistingAssets(user1.accessToken, {
|
||||
|
||||
@@ -1,178 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Script to generate test images with additional EXIF date tags
|
||||
* This creates actual JPEG images with embedded metadata for testing
|
||||
* Images are generated into e2e/test-assets/metadata/dates/
|
||||
*/
|
||||
|
||||
import { execSync } from 'node:child_process';
|
||||
import { writeFileSync } from 'node:fs';
|
||||
import { dirname, join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import sharp from 'sharp';
|
||||
|
||||
interface TestImage {
|
||||
filename: string;
|
||||
description: string;
|
||||
exifTags: Record<string, string>;
|
||||
}
|
||||
|
||||
const testImages: TestImage[] = [
|
||||
{
|
||||
filename: 'time-created.jpg',
|
||||
description: 'Image with TimeCreated tag',
|
||||
exifTags: {
|
||||
TimeCreated: '2023:11:15 14:30:00',
|
||||
Make: 'Canon',
|
||||
Model: 'EOS R5',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'gps-datetime.jpg',
|
||||
description: 'Image with GPSDateTime and coordinates',
|
||||
exifTags: {
|
||||
GPSDateTime: '2023:11:15 12:30:00Z',
|
||||
GPSLatitude: '37.7749',
|
||||
GPSLongitude: '-122.4194',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'datetime-utc.jpg',
|
||||
description: 'Image with DateTimeUTC tag',
|
||||
exifTags: {
|
||||
DateTimeUTC: '2023:11:15 10:30:00',
|
||||
Make: 'Nikon',
|
||||
Model: 'D850',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'gps-datestamp.jpg',
|
||||
description: 'Image with GPSDateStamp and GPSTimeStamp',
|
||||
exifTags: {
|
||||
GPSDateStamp: '2023:11:15',
|
||||
GPSTimeStamp: '08:30:00',
|
||||
GPSLatitude: '51.5074',
|
||||
GPSLongitude: '-0.1278',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'sony-datetime2.jpg',
|
||||
description: 'Sony camera image with SonyDateTime2 tag',
|
||||
exifTags: {
|
||||
SonyDateTime2: '2023:11:15 06:30:00',
|
||||
Make: 'SONY',
|
||||
Model: 'ILCE-7RM5',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'date-priority-test.jpg',
|
||||
description: 'Image with multiple date tags to test priority',
|
||||
exifTags: {
|
||||
SubSecDateTimeOriginal: '2023:01:01 01:00:00',
|
||||
DateTimeOriginal: '2023:02:02 02:00:00',
|
||||
SubSecCreateDate: '2023:03:03 03:00:00',
|
||||
CreateDate: '2023:04:04 04:00:00',
|
||||
CreationDate: '2023:05:05 05:00:00',
|
||||
DateTimeCreated: '2023:06:06 06:00:00',
|
||||
TimeCreated: '2023:07:07 07:00:00',
|
||||
GPSDateTime: '2023:08:08 08:00:00',
|
||||
DateTimeUTC: '2023:09:09 09:00:00',
|
||||
GPSDateStamp: '2023:10:10',
|
||||
SonyDateTime2: '2023:11:11 11:00:00',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'new-tags-only.jpg',
|
||||
description: 'Image with only additional date tags (no standard tags)',
|
||||
exifTags: {
|
||||
TimeCreated: '2023:12:01 15:45:30',
|
||||
GPSDateTime: '2023:12:01 13:45:30Z',
|
||||
DateTimeUTC: '2023:12:01 13:45:30',
|
||||
GPSDateStamp: '2023:12:01',
|
||||
SonyDateTime2: '2023:12:01 08:45:30',
|
||||
GPSLatitude: '40.7128',
|
||||
GPSLongitude: '-74.0060',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const generateTestImages = async (): Promise<void> => {
|
||||
// Target directory: e2e/test-assets/metadata/dates/
|
||||
// Current file is in: e2e/src/
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');
|
||||
|
||||
console.log('Generating test images with additional EXIF date tags...');
|
||||
console.log(`Target directory: ${targetDir}`);
|
||||
|
||||
for (const image of testImages) {
|
||||
try {
|
||||
const imagePath = join(targetDir, image.filename);
|
||||
|
||||
// Create unique JPEG file using Sharp
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
const jpegData = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
writeFileSync(imagePath, jpegData);
|
||||
|
||||
// Build exiftool command to add EXIF data
|
||||
const exifArgs = Object.entries(image.exifTags)
|
||||
.map(([tag, value]) => `-${tag}="${value}"`)
|
||||
.join(' ');
|
||||
|
||||
const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;
|
||||
|
||||
console.log(`Creating ${image.filename}: ${image.description}`);
|
||||
execSync(command, { stdio: 'pipe' });
|
||||
|
||||
// Verify the tags were written
|
||||
const verifyCommand = `exiftool -json "${imagePath}"`;
|
||||
const result = execSync(verifyCommand, { encoding: 'utf8' });
|
||||
const metadata = JSON.parse(result)[0];
|
||||
|
||||
console.log(` ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);
|
||||
|
||||
// Log first date tag found for verification
|
||||
const firstDateTag = Object.keys(image.exifTags).find(
|
||||
(tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
|
||||
);
|
||||
if (firstDateTag && metadata[firstDateTag]) {
|
||||
console.log(` ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to create ${image.filename}:`, (error as Error).message);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\nTest image generation complete!');
|
||||
console.log('Files created in:', targetDir);
|
||||
console.log('\nTo test these images:');
|
||||
console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
|
||||
};
|
||||
|
||||
export { generateTestImages };
|
||||
|
||||
// Run the generator if this file is executed directly
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
generateTestImages().catch(console.error);
|
||||
}
|
||||
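The generator above is removed; the committed fixtures come from the e2e/test-assets submodule bump further below. Its exiftool step boils down to one tagging call per image, roughly (Python sketch; paths and tag values are illustrative):

import subprocess

def write_exif(path: str, tags: dict[str, str]) -> None:
    # exiftool accepts repeated -TAG=VALUE arguments; -overwrite_original skips the *_original backup file
    args = [f"-{tag}={value}" for tag, value in tags.items()]
    subprocess.run(["exiftool", *args, "-overwrite_original", path], check=True)

write_exif("gps-datetime.jpg", {"GPSDateTime": "2023:11:15 12:30:00Z", "GPSLatitudeRef": "N"})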
@@ -52,7 +52,7 @@ test.describe('User Administration', () => {

await page.goto(`/admin/users/${user.userId}`);

await page.getByRole('button', { name: 'Edit user' }).click();
await page.getByRole('button', { name: 'Edit' }).click();
await expect(page.getByLabel('Admin User')).not.toBeChecked();
await page.getByText('Admin User').click();
await expect(page.getByLabel('Admin User')).toBeChecked();
@@ -77,7 +77,7 @@ test.describe('User Administration', () => {

await page.goto(`/admin/users/${user.userId}`);

await page.getByRole('button', { name: 'Edit user' }).click();
await page.getByRole('button', { name: 'Edit' }).click();
await expect(page.getByLabel('Admin User')).toBeChecked();
await page.getByText('Admin User').click();
await expect(page.getByLabel('Admin User')).not.toBeChecked();
Submodule e2e/test-assets updated: 37f60ea537...163c251744
14
i18n/en.json
@@ -475,6 +475,7 @@
|
||||
"allow_edits": "Allow edits",
|
||||
"allow_public_user_to_download": "Allow public user to download",
|
||||
"allow_public_user_to_upload": "Allow public user to upload",
|
||||
"allowed": "Allowed",
|
||||
"alt_text_qr_code": "QR code image",
|
||||
"anti_clockwise": "Anti-clockwise",
|
||||
"api_key": "API Key",
|
||||
@@ -1196,6 +1197,8 @@
|
||||
"import_path": "Import path",
|
||||
"in_albums": "In {count, plural, one {# album} other {# albums}}",
|
||||
"in_archive": "In archive",
|
||||
"in_year": "In {year}",
|
||||
"in_year_selector": "In",
|
||||
"include_archived": "Include archived",
|
||||
"include_shared_albums": "Include shared albums",
|
||||
"include_shared_partner_assets": "Include shared partner assets",
|
||||
@@ -1232,6 +1235,7 @@
|
||||
"language_setting_description": "Select your preferred language",
|
||||
"large_files": "Large Files",
|
||||
"last": "Last",
|
||||
"last_months": "{count, plural, one {Last month} other {Last # months}}",
|
||||
"last_seen": "Last seen",
|
||||
"latest_version": "Latest Version",
|
||||
"latitude": "Latitude",
|
||||
@@ -1314,6 +1318,10 @@
|
||||
"main_menu": "Main menu",
|
||||
"make": "Make",
|
||||
"manage_geolocation": "Manage location",
|
||||
"manage_media_access_rationale": "This permission is required for proper handling of moving assets to the trash and restoring them from it.",
|
||||
"manage_media_access_settings": "Open settings",
|
||||
"manage_media_access_subtitle": "Allow the Immich app to manage and move media files.",
|
||||
"manage_media_access_title": "Media Management Access",
|
||||
"manage_shared_links": "Manage shared links",
|
||||
"manage_sharing_with_partners": "Manage sharing with partners",
|
||||
"manage_the_app_settings": "Manage the app settings",
|
||||
@@ -1406,6 +1414,7 @@
|
||||
"new_pin_code": "New PIN code",
|
||||
"new_pin_code_subtitle": "This is your first time accessing the locked folder. Create a PIN code to securely access this page",
|
||||
"new_timeline": "New Timeline",
|
||||
"new_update": "New update",
|
||||
"new_user_created": "New user created",
|
||||
"new_version_available": "NEW VERSION AVAILABLE",
|
||||
"newest_first": "Newest first",
|
||||
@@ -1421,6 +1430,7 @@
|
||||
"no_cast_devices_found": "No cast devices found",
|
||||
"no_checksum_local": "No checksum available - cannot fetch local assets",
|
||||
"no_checksum_remote": "No checksum available - cannot fetch remote asset",
|
||||
"no_devices": "No authorized devices",
|
||||
"no_duplicates_found": "No duplicates were found.",
|
||||
"no_exif_info_available": "No exif info available",
|
||||
"no_explore_results_message": "Upload more photos to explore your collection.",
|
||||
@@ -1437,6 +1447,7 @@
|
||||
"no_results_description": "Try a synonym or more general keyword",
|
||||
"no_shared_albums_message": "Create an album to share photos and videos with people in your network",
|
||||
"no_uploads_in_progress": "No uploads in progress",
|
||||
"not_allowed": "Not allowed",
|
||||
"not_available": "N/A",
|
||||
"not_in_any_album": "Not in any album",
|
||||
"not_selected": "Not selected",
|
||||
@@ -1547,6 +1558,8 @@
|
||||
"photos_count": "{count, plural, one {{count, number} Photo} other {{count, number} Photos}}",
|
||||
"photos_from_previous_years": "Photos from previous years",
|
||||
"pick_a_location": "Pick a location",
|
||||
"pick_custom_range": "Custom range",
|
||||
"pick_date_range": "Select a date range",
|
||||
"pin_code_changed_successfully": "Successfully changed PIN code",
|
||||
"pin_code_reset_successfully": "Successfully reset PIN code",
|
||||
"pin_code_setup_successfully": "Successfully setup a PIN code",
|
||||
@@ -2027,6 +2040,7 @@
|
||||
"third_party_resources": "Third-Party Resources",
|
||||
"time": "Time",
|
||||
"time_based_memories": "Time-based memories",
|
||||
"time_based_memories_duration": "Number of seconds to display each image.",
|
||||
"timeline": "Timeline",
|
||||
"timezone": "Timezone",
|
||||
"to_archive": "Archive",
|
||||
|
||||
@@ -13,6 +13,8 @@ from rich.logging import RichHandler
from uvicorn import Server
from uvicorn.workers import UvicornWorker

from .schemas import ModelPrecision


class ClipSettings(BaseModel):
textual: str | None = None
@@ -24,6 +26,11 @@ class FacialRecognitionSettings(BaseModel):
detection: str | None = None


class OcrSettings(BaseModel):
recognition: str | None = None
detection: str | None = None


class PreloadModelData(BaseModel):
clip_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__CLIP", None)
facial_recognition_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION", None)
@@ -37,6 +44,7 @@ class PreloadModelData(BaseModel):
del os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION"]
clip: ClipSettings = ClipSettings()
facial_recognition: FacialRecognitionSettings = FacialRecognitionSettings()
ocr: OcrSettings = OcrSettings()


class MaxBatchSize(BaseModel):
@@ -70,6 +78,7 @@ class Settings(BaseSettings):
rknn_threads: int = 1
preload: PreloadModelData | None = None
max_batch_size: MaxBatchSize | None = None
openvino_precision: ModelPrecision = ModelPrecision.FP32

@property
def device_id(self) -> str:

@@ -103,6 +103,20 @@ async def preload_models(preload: PreloadModelData) -> None:
ModelTask.FACIAL_RECOGNITION,
)

if preload.ocr.detection is not None:
await load_models(
preload.ocr.detection,
ModelType.DETECTION,
ModelTask.OCR,
)

if preload.ocr.recognition is not None:
await load_models(
preload.ocr.recognition,
ModelType.RECOGNITION,
ModelTask.OCR,
)

if preload.clip_fallback is not None:
log.warning(
"Deprecated env variable: 'MACHINE_LEARNING_PRELOAD__CLIP'. "
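With the new OcrSettings slot, the OCR models can be preloaded at startup the same way as CLIP and facial recognition. A minimal sketch, assuming the MACHINE_LEARNING_ prefix and double-underscore nesting already used by the other preload variables (the variable names match the tests later in this diff; the import path is assumed):

import os

os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"

from immich_ml.config import Settings  # assumed import path for this sketch

settings = Settings()
assert settings.preload is not None
assert settings.preload.ocr.detection == "PP-OCRv5_mobile"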
@@ -78,6 +78,14 @@ _INSIGHTFACE_MODELS = {
_PADDLE_MODELS = {
"PP-OCRv5_server",
"PP-OCRv5_mobile",
"CH__PP-OCRv5_server",
"CH__PP-OCRv5_mobile",
"EL__PP-OCRv5_mobile",
"EN__PP-OCRv5_mobile",
"ESLAV__PP-OCRv5_mobile",
"KOREAN__PP-OCRv5_mobile",
"LATIN__PP-OCRv5_mobile",
"TH__PP-OCRv5_mobile",
}

SUPPORTED_PROVIDERS = [
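The new entries carry a LANG__ prefix in front of the base PP-OCRv5 weights. Based on the constructors below, the detector keeps only the part after the last "__" (its weights are shared across languages), while the recognizer maps the prefix to RapidOCR's LangRec and falls back to CH when there is no prefix. A small sketch of that split:

def split_ocr_model_name(name: str) -> tuple[str, str]:
    # "LATIN__PP-OCRv5_mobile" -> ("LATIN", "PP-OCRv5_mobile"); no prefix -> "CH" default
    lang = name.split("__")[0] if "__" in name else "CH"
    return lang, name.split("__")[-1]

assert split_ocr_model_name("LATIN__PP-OCRv5_mobile") == ("LATIN", "PP-OCRv5_mobile")
assert split_ocr_model_name("PP-OCRv5_server") == ("CH", "PP-OCRv5_server")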
@@ -1,8 +1,10 @@
from typing import Any

import cv2
import numpy as np
from numpy.typing import NDArray
from PIL import Image
from rapidocr.ch_ppocr_det import TextDetector as RapidTextDetector
from rapidocr.ch_ppocr_det.utils import DBPostProcess
from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType
@@ -10,11 +12,10 @@ from rapidocr.utils.typings import ModelType as RapidModelType

from immich_ml.config import log
from immich_ml.models.base import InferenceModel
from immich_ml.models.transforms import decode_cv2
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
from immich_ml.sessions.ort import OrtSession

from .schemas import OcrOptions, TextDetectionOutput
from .schemas import TextDetectionOutput


class TextDetector(InferenceModel):
@@ -22,15 +23,22 @@ class TextDetector(InferenceModel):
identity = (ModelType.DETECTION, ModelTask.OCR)

def __init__(self, model_name: str, **model_kwargs: Any) -> None:
super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)
super().__init__(model_name.split("__")[-1], **model_kwargs, model_format=ModelFormat.ONNX)
self.max_resolution = 736
self.min_score = 0.5
self.score_mode = "fast"
self.mean = np.array([0.5, 0.5, 0.5], dtype=np.float32)
self.std_inv = np.float32(1.0) / (np.array([0.5, 0.5, 0.5], dtype=np.float32) * 255.0)
self._empty: TextDetectionOutput = {
"image": np.empty(0, dtype=np.float32),
"boxes": np.empty(0, dtype=np.float32),
"scores": np.empty(0, dtype=np.float32),
}
self.postprocess = DBPostProcess(
thresh=0.3,
box_thresh=model_kwargs.get("minScore", 0.5),
max_candidates=1000,
unclip_ratio=1.6,
use_dilation=True,
score_mode="fast",
)

def _download(self) -> None:
model_info = InferSession.get_model_url(
@@ -52,35 +60,65 @@ class TextDetector(InferenceModel):

def _load(self) -> ModelSession:
# TODO: support other runtime sessions
session = OrtSession(self.model_path)
self.model = RapidTextDetector(
OcrOptions(
session=session.session,
limit_side_len=self.max_resolution,
limit_type="min",
box_thresh=self.min_score,
score_mode=self.score_mode,
)
)
return session
return OrtSession(self.model_path)

def _predict(self, inputs: bytes | Image.Image) -> TextDetectionOutput:
results = self.model(decode_cv2(inputs))
if results.boxes is None or results.scores is None or results.img is None:
# partly adapted from RapidOCR
def _predict(self, inputs: Image.Image) -> TextDetectionOutput:
w, h = inputs.size
if w < 32 or h < 32:
return self._empty
out = self.session.run(None, {"x": self._transform(inputs)})[0]
boxes, scores = self.postprocess(out, (h, w))
if len(boxes) == 0:
return self._empty
return {
"image": results.img,
"boxes": np.array(results.boxes, dtype=np.float32),
"scores": np.array(results.scores, dtype=np.float32),
"boxes": self.sorted_boxes(boxes),
"scores": np.array(scores, dtype=np.float32),
}

# adapted from RapidOCR
def _transform(self, img: Image.Image) -> NDArray[np.float32]:
if img.height < img.width:
ratio = float(self.max_resolution) / img.height
else:
ratio = float(self.max_resolution) / img.width

resize_h = int(img.height * ratio)
resize_w = int(img.width * ratio)

resize_h = int(round(resize_h / 32) * 32)
resize_w = int(round(resize_w / 32) * 32)
resized_img = img.resize((int(resize_w), int(resize_h)), resample=Image.Resampling.LANCZOS)

img_np: NDArray[np.float32] = cv2.cvtColor(np.array(resized_img, dtype=np.float32), cv2.COLOR_RGB2BGR) # type: ignore
img_np -= self.mean
img_np *= self.std_inv
img_np = np.transpose(img_np, (2, 0, 1))
return np.expand_dims(img_np, axis=0)

def sorted_boxes(self, dt_boxes: NDArray[np.float32]) -> NDArray[np.float32]:
if len(dt_boxes) == 0:
return dt_boxes

# Sort by y, then identify lines, then sort by (line, x)
y_order = np.argsort(dt_boxes[:, 0, 1], kind="stable")
sorted_y = dt_boxes[y_order, 0, 1]

line_ids = np.empty(len(dt_boxes), dtype=np.int32)
line_ids[0] = 0
np.cumsum(np.abs(np.diff(sorted_y)) >= 10, out=line_ids[1:])

# Create composite sort key for final ordering
# Shift line_ids by large factor, add x for tie-breaking
sort_key = line_ids[y_order] * 1e6 + dt_boxes[y_order, 0, 0]
final_order = np.argsort(sort_key, kind="stable")
sorted_boxes: NDArray[np.float32] = dt_boxes[y_order[final_order]]
return sorted_boxes

def configure(self, **kwargs: Any) -> None:
if (max_resolution := kwargs.get("maxResolution")) is not None:
self.max_resolution = max_resolution
self.model.limit_side_len = max_resolution
if (min_score := kwargs.get("minScore")) is not None:
self.min_score = min_score
self.model.postprocess_op.box_thresh = min_score
self.postprocess.box_thresh = min_score
if (score_mode := kwargs.get("scoreMode")) is not None:
self.score_mode = score_mode
self.model.postprocess_op.score_mode = score_mode
self.postprocess.score_mode = score_mode
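The sorted_boxes step above replaces RapidOCR's ordering with a vectorized reading-order sort: boxes are bucketed into text lines whenever consecutive top-left y values differ by less than 10 px, then ordered left to right within each line. A small worked example of that intent:

import numpy as np

# Three quads reduced to their top-left corners for brevity.
boxes = np.array([[[200.0, 12.0]], [[10.0, 10.0]], [[15.0, 60.0]]], dtype=np.float32)

y_order = np.argsort(boxes[:, 0, 1], kind="stable")                     # sort by top edge
line_ids = np.zeros(len(boxes), dtype=np.int32)
line_ids[1:] = np.cumsum(np.abs(np.diff(boxes[y_order, 0, 1])) >= 10)   # new line when y jumps by 10 px or more
sort_key = line_ids * 1e6 + boxes[y_order, 0, 0]                        # line first, x as tie-breaker
reading_order = y_order[np.argsort(sort_key, kind="stable")]
print(boxes[reading_order, 0])  # [[ 10. 10.], [200. 12.], [ 15. 60.]]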
@@ -1,9 +1,8 @@
|
||||
from typing import Any
|
||||
|
||||
import cv2
|
||||
import numpy as np
|
||||
from numpy.typing import NDArray
|
||||
from PIL.Image import Image
|
||||
from PIL import Image
|
||||
from rapidocr.ch_ppocr_rec import TextRecInput
|
||||
from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer
|
||||
from rapidocr.inference_engine.base import FileInfo, InferSession
|
||||
@@ -14,6 +13,7 @@ from rapidocr.utils.vis_res import VisRes
|
||||
|
||||
from immich_ml.config import log, settings
|
||||
from immich_ml.models.base import InferenceModel
|
||||
from immich_ml.models.transforms import pil_to_cv2
|
||||
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
|
||||
from immich_ml.sessions.ort import OrtSession
|
||||
|
||||
@@ -25,6 +25,7 @@ class TextRecognizer(InferenceModel):
|
||||
identity = (ModelType.RECOGNITION, ModelTask.OCR)
|
||||
|
||||
def __init__(self, model_name: str, **model_kwargs: Any) -> None:
|
||||
self.language = LangRec[model_name.split("__")[0]] if "__" in model_name else LangRec.CH
|
||||
self.min_score = model_kwargs.get("minScore", 0.9)
|
||||
self._empty: TextRecognitionOutput = {
|
||||
"box": np.empty(0, dtype=np.float32),
|
||||
@@ -41,7 +42,7 @@ class TextRecognizer(InferenceModel):
|
||||
engine_type=EngineType.ONNXRUNTIME,
|
||||
ocr_version=OCRVersion.PPOCRV5,
|
||||
task_type=TaskType.REC,
|
||||
lang_type=LangRec.CH,
|
||||
lang_type=self.language,
|
||||
model_type=RapidModelType.MOBILE if "mobile" in self.model_name else RapidModelType.SERVER,
|
||||
)
|
||||
)
|
||||
@@ -61,21 +62,21 @@ class TextRecognizer(InferenceModel):
|
||||
session=session.session,
|
||||
rec_batch_num=settings.max_batch_size.text_recognition if settings.max_batch_size is not None else 6,
|
||||
rec_img_shape=(3, 48, 320),
|
||||
lang_type=self.language,
|
||||
)
|
||||
)
|
||||
return session
|
||||
|
||||
def _predict(self, _: Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
|
||||
boxes, img, box_scores = texts["boxes"], texts["image"], texts["scores"]
|
||||
def _predict(self, img: Image.Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
|
||||
boxes, box_scores = texts["boxes"], texts["scores"]
|
||||
if boxes.shape[0] == 0:
|
||||
return self._empty
|
||||
rec = self.model(TextRecInput(img=self.get_crop_img_list(img, boxes)))
|
||||
if rec.txts is None:
|
||||
return self._empty
|
||||
|
||||
height, width = img.shape[0:2]
|
||||
boxes[:, :, 0] /= width
|
||||
boxes[:, :, 1] /= height
|
||||
boxes[:, :, 0] /= img.width
|
||||
boxes[:, :, 1] /= img.height
|
||||
|
||||
text_scores = np.array(rec.scores)
|
||||
valid_text_score_idx = text_scores > self.min_score
|
||||
@@ -87,7 +88,7 @@ class TextRecognizer(InferenceModel):
|
||||
"textScore": text_scores[valid_text_score_idx],
|
||||
}
|
||||
|
||||
def get_crop_img_list(self, img: NDArray[np.float32], boxes: NDArray[np.float32]) -> list[NDArray[np.float32]]:
|
||||
def get_crop_img_list(self, img: Image.Image, boxes: NDArray[np.float32]) -> list[NDArray[np.uint8]]:
|
||||
img_crop_width = np.maximum(
|
||||
np.linalg.norm(boxes[:, 1] - boxes[:, 0], axis=1), np.linalg.norm(boxes[:, 2] - boxes[:, 3], axis=1)
|
||||
).astype(np.int32)
|
||||
@@ -98,22 +99,55 @@ class TextRecognizer(InferenceModel):
|
||||
pts_std[:, 1:3, 0] = img_crop_width[:, None]
|
||||
pts_std[:, 2:4, 1] = img_crop_height[:, None]
|
||||
|
||||
img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1).tolist()
|
||||
imgs: list[NDArray[np.float32]] = []
|
||||
for box, pts_std, dst_size in zip(list(boxes), list(pts_std), img_crop_sizes):
|
||||
M = cv2.getPerspectiveTransform(box, pts_std)
|
||||
dst_img: NDArray[np.float32] = cv2.warpPerspective(
|
||||
img,
|
||||
M,
|
||||
dst_size,
|
||||
borderMode=cv2.BORDER_REPLICATE,
|
||||
flags=cv2.INTER_CUBIC,
|
||||
) # type: ignore
|
||||
dst_height, dst_width = dst_img.shape[0:2]
|
||||
img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1)
|
||||
all_coeffs = self._get_perspective_transform(pts_std, boxes)
|
||||
imgs: list[NDArray[np.uint8]] = []
|
||||
for coeffs, dst_size in zip(all_coeffs, img_crop_sizes):
|
||||
dst_img = img.transform(
|
||||
size=tuple(dst_size),
|
||||
method=Image.Transform.PERSPECTIVE,
|
||||
data=tuple(coeffs),
|
||||
resample=Image.Resampling.BICUBIC,
|
||||
)
|
||||
|
||||
dst_width, dst_height = dst_img.size
|
||||
if dst_height * 1.0 / dst_width >= 1.5:
|
||||
dst_img = np.rot90(dst_img)
|
||||
imgs.append(dst_img)
|
||||
dst_img = dst_img.rotate(90, expand=True)
|
||||
imgs.append(pil_to_cv2(dst_img))
|
||||
|
||||
return imgs
|
||||
|
||||
def _get_perspective_transform(self, src: NDArray[np.float32], dst: NDArray[np.float32]) -> NDArray[np.float32]:
|
||||
N = src.shape[0]
|
||||
x, y = src[:, :, 0], src[:, :, 1]
|
||||
u, v = dst[:, :, 0], dst[:, :, 1]
|
||||
A = np.zeros((N, 8, 9), dtype=np.float32)
|
||||
|
||||
# Fill even rows (0, 2, 4, 6): [x, y, 1, 0, 0, 0, -u*x, -u*y, -u]
|
||||
A[:, ::2, 0] = x
|
||||
A[:, ::2, 1] = y
|
||||
A[:, ::2, 2] = 1
|
||||
A[:, ::2, 6] = -u * x
|
||||
A[:, ::2, 7] = -u * y
|
||||
A[:, ::2, 8] = -u
|
||||
|
||||
# Fill odd rows (1, 3, 5, 7): [0, 0, 0, x, y, 1, -v*x, -v*y, -v]
|
||||
A[:, 1::2, 3] = x
|
||||
A[:, 1::2, 4] = y
|
||||
A[:, 1::2, 5] = 1
|
||||
A[:, 1::2, 6] = -v * x
|
||||
A[:, 1::2, 7] = -v * y
|
||||
A[:, 1::2, 8] = -v
|
||||
|
||||
# Solve using SVD for all matrices at once
|
||||
_, _, Vt = np.linalg.svd(A)
|
||||
H = Vt[:, -1, :].reshape(N, 3, 3)
|
||||
H = H / H[:, 2:3, 2:3]
|
||||
|
||||
# Extract the 8 coefficients for each transformation
|
||||
return np.column_stack(
|
||||
[H[:, 0, 0], H[:, 0, 1], H[:, 0, 2], H[:, 1, 0], H[:, 1, 1], H[:, 1, 2], H[:, 2, 0], H[:, 2, 1]]
|
||||
) # pyright: ignore[reportReturnType]
|
||||
|
||||
def configure(self, **kwargs: Any) -> None:
|
||||
self.min_score = kwargs.get("minScore", self.min_score)
|
||||
|
||||
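The batched solver above replaces per-box cv2.getPerspectiveTransform/warpPerspective with a single SVD solve over all crops, yielding the eight coefficients PIL's Image.transform expects. PIL maps each output pixel of the rectified crop back to a source point, which is why src is the axis-aligned crop rectangle and dst is the detected quad. A sketch of how those coefficients are applied:

def apply_perspective(coeffs, x, y):
    # PIL PERSPECTIVE semantics: output (x, y) -> source (x_src, y_src)
    a, b, c, d, e, f, g, h = coeffs
    w = g * x + h * y + 1.0
    return (a * x + b * y + c) / w, (d * x + e * y + f) / w

identity = (1, 0, 0, 0, 1, 0, 0, 0)
assert apply_perspective(identity, 12.0, 7.0) == (12.0, 7.0)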
@@ -7,7 +7,6 @@ from typing_extensions import TypedDict


class TextDetectionOutput(TypedDict):
image: npt.NDArray[np.float32]
boxes: npt.NDArray[np.float32]
scores: npt.NDArray[np.float32]

@@ -21,8 +20,8 @@ class TextRecognitionOutput(TypedDict):

# RapidOCR expects `engine_type`, `lang_type`, and `font_path` to be attributes
class OcrOptions(dict[str, Any]):
def __init__(self, **options: Any) -> None:
def __init__(self, lang_type: LangRec | None = None, **options: Any) -> None:
super().__init__(**options)
self.engine_type = EngineType.ONNXRUNTIME
self.lang_type = LangRec.CH
self.lang_type = lang_type
self.font_path = None
@@ -46,6 +46,11 @@ class ModelSource(StrEnum):
PADDLE = "paddle"


class ModelPrecision(StrEnum):
FP16 = "FP16"
FP32 = "FP32"


ModelIdentity = tuple[ModelType, ModelTask]

@@ -93,10 +93,12 @@ class OrtSession:
case "CUDAExecutionProvider" | "ROCMExecutionProvider":
options = {"arena_extend_strategy": "kSameAsRequested", "device_id": settings.device_id}
case "OpenVINOExecutionProvider":
openvino_dir = self.model_path.parent / "openvino"
device = f"GPU.{settings.device_id}"
options = {
"device_type": f"GPU.{settings.device_id}",
"precision": "FP32",
"cache_dir": (self.model_path.parent / "openvino").as_posix(),
"device_type": device,
"precision": settings.openvino_precision.value,
"cache_dir": openvino_dir.as_posix(),
}
case "CoreMLExecutionProvider":
options = {
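With openvino_precision exposed as a setting, the OpenVINO provider can be switched to FP16 without code changes. A hedged sketch, assuming the MACHINE_LEARNING_ environment prefix used by the other settings (the tests below patch the setting directly instead):

import os

# Assumed variable name derived from the openvino_precision field; verify against the deployed config.
os.environ["MACHINE_LEARNING_OPENVINO_PRECISION"] = "FP16"
# Expected provider options per the updated test:
# {"device_type": "GPU.1", "precision": "FP16", "cache_dir": "/cache/ViT-B-32__openai/textual/openvino"}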
@@ -1,6 +1,6 @@
[project]
name = "immich-ml"
version = "2.2.1"
version = "2.2.3"
description = ""
authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }]
requires-python = ">=3.10,<4.0"
@@ -26,7 +26,7 @@ from immich_ml.models.clip.textual import MClipTextualEncoder, OpenClipTextualEn
|
||||
from immich_ml.models.clip.visual import OpenClipVisualEncoder
|
||||
from immich_ml.models.facial_recognition.detection import FaceDetector
|
||||
from immich_ml.models.facial_recognition.recognition import FaceRecognizer
|
||||
from immich_ml.schemas import ModelFormat, ModelTask, ModelType
|
||||
from immich_ml.schemas import ModelFormat, ModelPrecision, ModelTask, ModelType
|
||||
from immich_ml.sessions.ann import AnnSession
|
||||
from immich_ml.sessions.ort import OrtSession
|
||||
from immich_ml.sessions.rknn import RknnSession, run_inference
|
||||
@@ -240,11 +240,16 @@ class TestOrtSession:
|
||||
|
||||
@pytest.mark.ov_device_ids(["GPU.0", "CPU"])
|
||||
def test_sets_default_provider_options(self, ov_device_ids: list[str]) -> None:
|
||||
model_path = "/cache/ViT-B-32__openai/model.onnx"
|
||||
model_path = "/cache/ViT-B-32__openai/textual/model.onnx"
|
||||
|
||||
session = OrtSession(model_path, providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"])
|
||||
|
||||
assert session.provider_options == [
|
||||
{"device_type": "GPU.0", "precision": "FP32", "cache_dir": "/cache/ViT-B-32__openai/openvino"},
|
||||
{
|
||||
"device_type": "GPU.0",
|
||||
"precision": "FP32",
|
||||
"cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
|
||||
},
|
||||
{"arena_extend_strategy": "kSameAsRequested"},
|
||||
]
|
||||
|
||||
@@ -262,6 +267,21 @@ class TestOrtSession:
|
||||
}
|
||||
]
|
||||
|
||||
def test_sets_openvino_to_fp16_if_enabled(self, mocker: MockerFixture) -> None:
|
||||
model_path = "/cache/ViT-B-32__openai/textual/model.onnx"
|
||||
os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"
|
||||
mocker.patch.object(settings, "openvino_precision", ModelPrecision.FP16)
|
||||
|
||||
session = OrtSession(model_path, providers=["OpenVINOExecutionProvider"])
|
||||
|
||||
assert session.provider_options == [
|
||||
{
|
||||
"device_type": "GPU.1",
|
||||
"precision": "FP16",
|
||||
"cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
|
||||
}
|
||||
]
|
||||
|
||||
def test_sets_provider_options_for_cuda(self) -> None:
|
||||
os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"
|
||||
|
||||
@@ -417,7 +437,7 @@ class TestRknnSession:
|
||||
session.run(None, input_feed)
|
||||
|
||||
rknn_session.return_value.put.assert_called_once_with([input1, input2])
|
||||
np_spy.call_count == 2
|
||||
assert np_spy.call_count == 2
|
||||
np_spy.assert_has_calls([mock.call(input1), mock.call(input2)])
|
||||
|
||||
|
||||
@@ -925,11 +945,34 @@ class TestCache:
|
||||
any_order=True,
|
||||
)
|
||||
|
||||
async def test_preloads_ocr_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
|
||||
os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
|
||||
os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"
|
||||
|
||||
settings = Settings()
|
||||
assert settings.preload is not None
|
||||
assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
|
||||
assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"
|
||||
|
||||
model_cache = ModelCache()
|
||||
monkeypatch.setattr("immich_ml.main.model_cache", model_cache)
|
||||
|
||||
await preload_models(settings.preload)
|
||||
mock_get_model.assert_has_calls(
|
||||
[
|
||||
mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
|
||||
mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
|
||||
],
|
||||
any_order=True,
|
||||
)
|
||||
|
||||
async def test_preloads_all_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
|
||||
os.environ["MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL"] = "ViT-B-32__openai"
|
||||
os.environ["MACHINE_LEARNING_PRELOAD__CLIP__VISUAL"] = "ViT-B-32__openai"
|
||||
os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION"] = "buffalo_s"
|
||||
os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION"] = "buffalo_s"
|
||||
os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
|
||||
os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"
|
||||
|
||||
settings = Settings()
|
||||
assert settings.preload is not None
|
||||
@@ -937,6 +980,8 @@ class TestCache:
|
||||
assert settings.preload.clip.textual == "ViT-B-32__openai"
|
||||
assert settings.preload.facial_recognition.recognition == "buffalo_s"
|
||||
assert settings.preload.facial_recognition.detection == "buffalo_s"
|
||||
assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
|
||||
assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"
|
||||
|
||||
model_cache = ModelCache()
|
||||
monkeypatch.setattr("immich_ml.main.model_cache", model_cache)
|
||||
@@ -948,6 +993,8 @@ class TestCache:
|
||||
mock.call("ViT-B-32__openai", ModelType.VISUAL, ModelTask.SEARCH),
|
||||
mock.call("buffalo_s", ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION),
|
||||
mock.call("buffalo_s", ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION),
|
||||
mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
|
||||
mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
|
||||
],
|
||||
any_order=True,
|
||||
)
|
||||
|
||||
@@ -88,7 +88,6 @@ if [ "$CURRENT_MOBILE" != "$NEXT_MOBILE" ]; then
fi

sed -i "s/\"android\.injected\.version\.name\" => \"$CURRENT_SERVER\",/\"android\.injected\.version\.name\" => \"$NEXT_SERVER\",/" mobile/android/fastlane/Fastfile
sed -i "s/version_number: \"$CURRENT_SERVER\"$/version_number: \"$NEXT_SERVER\"/" mobile/ios/fastlane/Fastfile
sed -i "s/\"android\.injected\.version\.code\" => $CURRENT_MOBILE,/\"android\.injected\.version\.code\" => $NEXT_MOBILE,/" mobile/android/fastlane/Fastfile
sed -i "s/^version: $CURRENT_SERVER+$CURRENT_MOBILE$/version: $NEXT_SERVER+$NEXT_MOBILE/" mobile/pubspec.yaml

515
mise.toml
@@ -1,7 +1,9 @@
|
||||
experimental_monorepo_root = true
|
||||
|
||||
[tools]
|
||||
node = "24.11.0"
|
||||
flutter = "3.35.7"
|
||||
pnpm = "10.19.0"
|
||||
pnpm = "10.20.0"
|
||||
terragrunt = "0.91.2"
|
||||
opentofu = "1.10.6"
|
||||
|
||||
@@ -14,514 +16,21 @@ postinstall = "chmod +x $MISE_TOOL_INSTALL_PATH/dcm"
|
||||
experimental = true
|
||||
pin = true
|
||||
|
||||
# .github
|
||||
[tasks."github:install"]
|
||||
run = "pnpm install --filter github --frozen-lockfile"
|
||||
|
||||
[tasks."github:format"]
|
||||
env._.path = "./.github/node_modules/.bin"
|
||||
dir = ".github"
|
||||
run = "prettier --check ."
|
||||
|
||||
[tasks."github:format-fix"]
|
||||
env._.path = "./.github/node_modules/.bin"
|
||||
dir = ".github"
|
||||
run = "prettier --write ."
|
||||
|
||||
# @immich/cli
|
||||
[tasks."cli:install"]
|
||||
run = "pnpm install --filter @immich/cli --frozen-lockfile"
|
||||
|
||||
[tasks."cli:build"]
|
||||
env._.path = "./cli/node_modules/.bin"
|
||||
dir = "cli"
|
||||
run = "vite build"
|
||||
|
||||
[tasks."cli:test"]
|
||||
env._.path = "./cli/node_modules/.bin"
|
||||
dir = "cli"
|
||||
run = "vite"
|
||||
|
||||
[tasks."cli:lint"]
|
||||
env._.path = "./cli/node_modules/.bin"
|
||||
dir = "cli"
|
||||
run = "eslint \"src/**/*.ts\" --max-warnings 0"
|
||||
|
||||
[tasks."cli:lint-fix"]
|
||||
run = "mise run cli:lint --fix"
|
||||
|
||||
[tasks."cli:format"]
|
||||
env._.path = "./cli/node_modules/.bin"
|
||||
dir = "cli"
|
||||
run = "prettier --check ."
|
||||
|
||||
[tasks."cli:format-fix"]
|
||||
env._.path = "./cli/node_modules/.bin"
|
||||
dir = "cli"
|
||||
run = "prettier --write ."
|
||||
|
||||
[tasks."cli:check"]
|
||||
env._.path = "./cli/node_modules/.bin"
|
||||
dir = "cli"
|
||||
run = "tsc --noEmit"
|
||||
|
||||
# @immich/sdk
|
||||
# SDK tasks
|
||||
[tasks."sdk:install"]
|
||||
dir = "open-api/typescript-sdk"
|
||||
run = "pnpm install --filter @immich/sdk --frozen-lockfile"
|
||||
|
||||
[tasks."sdk:build"]
|
||||
env._.path = "./open-api/typescript-sdk/node_modules/.bin"
|
||||
dir = "./open-api/typescript-sdk"
|
||||
dir = "open-api/typescript-sdk"
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "tsc"
|
||||
|
||||
# docs
|
||||
[tasks."docs:install"]
|
||||
run = "pnpm install --filter documentation --frozen-lockfile"
|
||||
|
||||
[tasks."docs:start"]
|
||||
env._.path = "./docs/node_modules/.bin"
|
||||
dir = "docs"
|
||||
run = "docusaurus --port 3005"
|
||||
|
||||
[tasks."docs:build"]
|
||||
env._.path = "./docs/node_modules/.bin"
|
||||
dir = "docs"
|
||||
run = [
|
||||
"jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
|
||||
"docusaurus build",
|
||||
]
|
||||
|
||||
|
||||
[tasks."docs:preview"]
|
||||
env._.path = "./docs/node_modules/.bin"
|
||||
dir = "docs"
|
||||
run = "docusaurus serve"
|
||||
|
||||
|
||||
[tasks."docs:format"]
|
||||
env._.path = "./docs/node_modules/.bin"
|
||||
dir = "docs"
|
||||
run = "prettier --check ."
|
||||
|
||||
[tasks."docs:format-fix"]
|
||||
env._.path = "./docs/node_modules/.bin"
|
||||
dir = "docs"
|
||||
run = "prettier --write ."
|
||||
|
||||
|
||||
# e2e
|
||||
[tasks."e2e:install"]
|
||||
run = "pnpm install --filter immich-e2e --frozen-lockfile"
|
||||
|
||||
[tasks."e2e:test"]
|
||||
env._.path = "./e2e/node_modules/.bin"
|
||||
dir = "e2e"
|
||||
run = "vitest --run"
|
||||
|
||||
[tasks."e2e:test-web"]
|
||||
env._.path = "./e2e/node_modules/.bin"
|
||||
dir = "e2e"
|
||||
run = "playwright test"
|
||||
|
||||
[tasks."e2e:format"]
|
||||
env._.path = "./e2e/node_modules/.bin"
|
||||
dir = "e2e"
|
||||
run = "prettier --check ."
|
||||
|
||||
[tasks."e2e:format-fix"]
|
||||
env._.path = "./e2e/node_modules/.bin"
|
||||
dir = "e2e"
|
||||
run = "prettier --write ."
|
||||
|
||||
[tasks."e2e:lint"]
|
||||
env._.path = "./e2e/node_modules/.bin"
|
||||
dir = "e2e"
|
||||
run = "eslint \"src/**/*.ts\" --max-warnings 0"
|
||||
|
||||
[tasks."e2e:lint-fix"]
|
||||
run = "mise run e2e:lint --fix"
|
||||
|
||||
[tasks."e2e:check"]
|
||||
env._.path = "./e2e/node_modules/.bin"
|
||||
dir = "e2e"
|
||||
run = "tsc --noEmit"
|
||||
|
||||
# i18n
|
||||
# i18n tasks
|
||||
[tasks."i18n:format"]
|
||||
run = "mise run i18n:format-fix"
|
||||
dir = "i18n"
|
||||
run = { task = ":i18n:format-fix" }
|
||||
|
||||
[tasks."i18n:format-fix"]
|
||||
run = "pnpm dlx sort-json ./i18n/*.json"
|
||||
|
||||
|
||||
# server
|
||||
[tasks."server:install"]
|
||||
run = "pnpm install --filter immich --frozen-lockfile"
|
||||
|
||||
[tasks."server:build"]
|
||||
env._.path = "./server/node_modules/.bin"
|
||||
dir = "server"
|
||||
run = "nest build"
|
||||
|
||||
[tasks."server:test"]
|
||||
env._.path = "./server/node_modules/.bin"
|
||||
dir = "server"
|
||||
run = "vitest --config test/vitest.config.mjs"
|
||||
|
||||
[tasks."server:test-medium"]
|
||||
env._.path = "./server/node_modules/.bin"
|
||||
dir = "server"
|
||||
run = "vitest --config test/vitest.config.medium.mjs"
|
||||
|
||||
[tasks."server:format"]
|
||||
env._.path = "./server/node_modules/.bin"
|
||||
dir = "server"
|
||||
run = "prettier --check ."
|
||||
|
||||
[tasks."server:format-fix"]
|
||||
env._.path = "./server/node_modules/.bin"
|
||||
dir = "server"
|
||||
run = "prettier --write ."
|
||||
|
||||
[tasks."server:lint"]
|
||||
env._.path = "./server/node_modules/.bin"
|
||||
dir = "server"
|
||||
run = "eslint \"src/**/*.ts\" \"test/**/*.ts\" --max-warnings 0"
|
||||
|
||||
[tasks."server:lint-fix"]
|
||||
run = "mise run server:lint --fix"
|
||||
|
||||
[tasks."server:check"]
|
||||
env._.path = "./server/node_modules/.bin"
|
||||
dir = "server"
|
||||
run = "tsc --noEmit"
|
||||
|
||||
[tasks."server:sql"]
|
||||
dir = "server"
|
||||
run = "node ./dist/bin/sync-open-api.js"
|
||||
|
||||
[tasks."server:open-api"]
|
||||
dir = "server"
|
||||
run = "node ./dist/bin/sync-open-api.js"
|
||||
|
||||
[tasks."server:migrations"]
|
||||
dir = "server"
|
||||
run = "node ./dist/bin/migrations.js"
|
||||
description = "Run database migration commands (create, generate, run, debug, or query)"
|
||||
|
||||
[tasks."server:schema-drop"]
|
||||
run = "mise run server:migrations query 'DROP schema public cascade; CREATE schema public;'"
|
||||
|
||||
[tasks."server:schema-reset"]
|
||||
run = "mise run server:schema-drop && mise run server:migrations run"
|
||||
|
||||
[tasks."server:email-dev"]
|
||||
env._.path = "./server/node_modules/.bin"
|
||||
dir = "server"
|
||||
run = "email dev -p 3050 --dir src/emails"
|
||||
|
||||
[tasks."server:checklist"]
|
||||
run = [
|
||||
"mise run server:install",
|
||||
"mise run server:format",
|
||||
"mise run server:lint",
|
||||
"mise run server:check",
|
||||
"mise run server:test-medium --run",
|
||||
"mise run server:test --run",
|
||||
]
|
||||
|
||||
|
||||
# web
|
||||
[tasks."web:install"]
|
||||
run = "pnpm install --filter immich-web --frozen-lockfile"
|
||||
|
||||
[tasks."web:svelte-kit-sync"]
|
||||
env._.path = "./web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "svelte-kit sync"
|
||||
|
||||
[tasks."web:build"]
|
||||
env._.path = "./web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "vite build"
|
||||
|
||||
[tasks."web:build-stats"]
|
||||
env.BUILD_STATS = "true"
|
||||
env._.path = "./web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "vite build"
|
||||
|
||||
[tasks."web:preview"]
|
||||
env._.path = "./web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "vite preview"
|
||||
|
||||
[tasks."web:start"]
|
||||
env._.path = "web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "vite dev --host 0.0.0.0 --port 3000"
|
||||
|
||||
[tasks."web:test"]
|
||||
depends = "web:svelte-kit-sync"
|
||||
env._.path = "web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "vitest"
|
||||
|
||||
[tasks."web:format"]
|
||||
env._.path = "web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "prettier --check ."
|
||||
|
||||
[tasks."web:format-fix"]
|
||||
env._.path = "web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "prettier --write ."
|
||||
|
||||
[tasks."web:lint"]
|
||||
env._.path = "web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "eslint . --max-warnings 0 --concurrency 4"
|
||||
|
||||
[tasks."web:lint-fix"]
|
||||
run = "mise run web:lint --fix"
|
||||
|
||||
[tasks."web:check"]
|
||||
depends = "web:svelte-kit-sync"
|
||||
env._.path = "web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "tsc --noEmit"
|
||||
|
||||
[tasks."web:check-svelte"]
|
||||
depends = "web:svelte-kit-sync"
|
||||
env._.path = "web/node_modules/.bin"
|
||||
dir = "web"
|
||||
run = "svelte-check --no-tsconfig --fail-on-warnings"
|
||||
|
||||
[tasks."web:checklist"]
|
||||
run = [
|
||||
"mise run web:install",
|
||||
"mise run web:format",
|
||||
"mise run web:check",
|
||||
"mise run web:test --run",
|
||||
"mise run web:lint",
|
||||
]
|
||||
|
||||
|
||||
# mobile
|
||||
[tasks."mobile:codegen:dart"]
|
||||
alias = "mobile:codegen"
|
||||
description = "Execute build_runner to auto-generate dart code"
|
||||
dir = "mobile"
|
||||
sources = [
|
||||
"pubspec.yaml",
|
||||
"build.yaml",
|
||||
"lib/**/*.dart",
|
||||
"infrastructure/**/*.drift",
|
||||
]
|
||||
outputs = { auto = true }
|
||||
run = "dart run build_runner build --delete-conflicting-outputs"
|
||||
|
||||
[tasks."mobile:codegen:pigeon"]
|
||||
alias = "mobile:pigeon"
|
||||
description = "Generate pigeon platform code"
|
||||
dir = "mobile"
|
||||
depends = [
|
||||
"mobile:pigeon:native-sync",
|
||||
"mobile:pigeon:thumbnail",
|
||||
"mobile:pigeon:background-worker",
|
||||
"mobile:pigeon:background-worker-lock",
|
||||
"mobile:pigeon:connectivity",
|
||||
]
|
||||
|
||||
[tasks."mobile:codegen:translation"]
|
||||
alias = "mobile:translation"
|
||||
description = "Generate translations from i18n JSONs"
|
||||
dir = "mobile"
|
||||
run = [
|
||||
{ task = "i18n:format-fix" },
|
||||
{ tasks = [
|
||||
"mobile:i18n:loader",
|
||||
"mobile:i18n:keys",
|
||||
] },
|
||||
]
|
||||
|
||||
[tasks."mobile:codegen:app-icon"]
|
||||
description = "Generate app icons"
|
||||
dir = "mobile"
|
||||
run = "flutter pub run flutter_launcher_icons:main"
|
||||
|
||||
[tasks."mobile:codegen:splash"]
|
||||
description = "Generate splash screen"
|
||||
dir = "mobile"
|
||||
run = "flutter pub run flutter_native_splash:create"
|
||||
|
||||
[tasks."mobile:test"]
|
||||
description = "Run mobile tests"
|
||||
dir = "mobile"
|
||||
run = "flutter test"
|
||||
|
||||
[tasks."mobile:lint"]
|
||||
description = "Analyze Dart code"
|
||||
dir = "mobile"
|
||||
depends = ["mobile:analyze:dart", "mobile:analyze:dcm"]
|
||||
|
||||
[tasks."mobile:lint-fix"]
|
||||
description = "Auto-fix Dart code"
|
||||
dir = "mobile"
|
||||
depends = ["mobile:analyze:fix:dart", "mobile:analyze:fix:dcm"]
|
||||
|
||||
[tasks."mobile:format"]
|
||||
description = "Format Dart code"
|
||||
dir = "mobile"
|
||||
run = "dart format --set-exit-if-changed $(find lib -name '*.dart' -not \\( -name '*.g.dart' -o -name '*.drift.dart' -o -name '*.gr.dart' \\))"
|
||||
|
||||
[tasks."mobile:build:android"]
|
||||
description = "Build Android release"
|
||||
dir = "mobile"
|
||||
run = "flutter build appbundle"
|
||||
|
||||
[tasks."mobile:drift:migration"]
|
||||
alias = "mobile:migration"
|
||||
description = "Generate database migrations"
|
||||
dir = "mobile"
|
||||
run = "dart run drift_dev make-migrations"
|
||||
|
||||
|
||||
# mobile internal tasks
|
||||
[tasks."mobile:pigeon:native-sync"]
|
||||
description = "Generate native sync API pigeon code"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["pigeon/native_sync_api.dart"]
|
||||
outputs = [
|
||||
"lib/platform/native_sync_api.g.dart",
|
||||
"ios/Runner/Sync/Messages.g.swift",
|
||||
"android/app/src/main/kotlin/app/alextran/immich/sync/Messages.g.kt",
|
||||
]
|
||||
run = [
|
||||
"dart run pigeon --input pigeon/native_sync_api.dart",
|
||||
"dart format lib/platform/native_sync_api.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:pigeon:thumbnail"]
|
||||
description = "Generate thumbnail API pigeon code"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["pigeon/thumbnail_api.dart"]
|
||||
outputs = [
|
||||
"lib/platform/thumbnail_api.g.dart",
|
||||
"ios/Runner/Images/Thumbnails.g.swift",
|
||||
"android/app/src/main/kotlin/app/alextran/immich/images/Thumbnails.g.kt",
|
||||
]
|
||||
run = [
|
||||
"dart run pigeon --input pigeon/thumbnail_api.dart",
|
||||
"dart format lib/platform/thumbnail_api.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:pigeon:background-worker"]
|
||||
description = "Generate background worker API pigeon code"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["pigeon/background_worker_api.dart"]
|
||||
outputs = [
|
||||
"lib/platform/background_worker_api.g.dart",
|
||||
"ios/Runner/Background/BackgroundWorker.g.swift",
|
||||
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorker.g.kt",
|
||||
]
|
||||
run = [
|
||||
"dart run pigeon --input pigeon/background_worker_api.dart",
|
||||
"dart format lib/platform/background_worker_api.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:pigeon:background-worker-lock"]
|
||||
description = "Generate background worker lock API pigeon code"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["pigeon/background_worker_lock_api.dart"]
|
||||
outputs = [
|
||||
"lib/platform/background_worker_lock_api.g.dart",
|
||||
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorkerLock.g.kt",
|
||||
]
|
||||
run = [
|
||||
"dart run pigeon --input pigeon/background_worker_lock_api.dart",
|
||||
"dart format lib/platform/background_worker_lock_api.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:pigeon:connectivity"]
|
||||
description = "Generate connectivity API pigeon code"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["pigeon/connectivity_api.dart"]
|
||||
outputs = [
|
||||
"lib/platform/connectivity_api.g.dart",
|
||||
"ios/Runner/Connectivity/Connectivity.g.swift",
|
||||
"android/app/src/main/kotlin/app/alextran/immich/connectivity/Connectivity.g.kt",
|
||||
]
|
||||
run = [
|
||||
"dart run pigeon --input pigeon/connectivity_api.dart",
|
||||
"dart format lib/platform/connectivity_api.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:i18n:loader"]
|
||||
description = "Generate i18n loader"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["i18n/"]
|
||||
outputs = "lib/generated/codegen_loader.g.dart"
|
||||
run = [
|
||||
"dart run easy_localization:generate -S ../i18n",
|
||||
"dart format lib/generated/codegen_loader.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:i18n:keys"]
|
||||
description = "Generate i18n keys"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["i18n/en.json"]
|
||||
outputs = "lib/generated/intl_keys.g.dart"
|
||||
run = [
|
||||
"dart run bin/generate_keys.dart",
|
||||
"dart format lib/generated/intl_keys.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:analyze:dart"]
|
||||
description = "Run Dart analysis"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
run = "dart analyze --fatal-infos"
|
||||
|
||||
[tasks."mobile:analyze:dcm"]
|
||||
description = "Run Dart Code Metrics"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
run = "dcm analyze lib --fatal-style --fatal-warnings"
|
||||
|
||||
[tasks."mobile:analyze:fix:dart"]
|
||||
description = "Auto-fix Dart analysis"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
run = "dart fix --apply"
|
||||
|
||||
[tasks."mobile:analyze:fix:dcm"]
|
||||
description = "Auto-fix Dart Code Metrics"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
run = "dcm fix lib"
|
||||
|
||||
# docs deployment
|
||||
[tasks."tg:fmt"]
|
||||
run = "terragrunt hclfmt"
|
||||
description = "Format terragrunt files"
|
||||
|
||||
[tasks.tf]
|
||||
run = "terragrunt run --all"
|
||||
description = "Wrapper for terragrunt run-all"
|
||||
dir = "{{cwd}}"
|
||||
|
||||
[tasks."tf:fmt"]
|
||||
run = "tofu fmt -recursive tf/"
|
||||
description = "Format terraform files"
|
||||
|
||||
[tasks."tf:init"]
|
||||
run = "mise run tf init -- -reconfigure"
|
||||
dir = "{{cwd}}"
|
||||
dir = "i18n"
|
||||
run = "pnpm dlx sort-json *.json"
|
||||
|
||||
@@ -143,7 +143,7 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
val mediaUrls = call.argument<List<String>>("mediaUrls")
|
||||
if (mediaUrls != null) {
|
||||
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
|
||||
moveToTrash(mediaUrls, result)
|
||||
moveToTrash(mediaUrls, result)
|
||||
} else {
|
||||
result.error("PERMISSION_DENIED", "Media permission required", null)
|
||||
}
|
||||
@@ -155,15 +155,23 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
"restoreFromTrash" -> {
|
||||
val fileName = call.argument<String>("fileName")
|
||||
val type = call.argument<Int>("type")
|
||||
val mediaId = call.argument<String>("mediaId")
|
||||
if (fileName != null && type != null) {
|
||||
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
|
||||
restoreFromTrash(fileName, type, result)
|
||||
} else {
|
||||
result.error("PERMISSION_DENIED", "Media permission required", null)
|
||||
}
|
||||
} else {
|
||||
result.error("INVALID_NAME", "The file name is not specified.", null)
|
||||
}
|
||||
} else
|
||||
if (mediaId != null && type != null) {
|
||||
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
|
||||
restoreFromTrashById(mediaId, type, result)
|
||||
} else {
|
||||
result.error("PERMISSION_DENIED", "Media permission required", null)
|
||||
}
|
||||
} else {
|
||||
result.error("INVALID_PARAMS", "Required params are not specified.", null)
|
||||
}
|
||||
}
|
||||
|
||||
"requestManageMediaPermission" -> {
|
||||
@@ -175,6 +183,17 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
}
|
||||
}
|
||||
|
||||
"hasManageMediaPermission" -> {
|
||||
if (hasManageMediaPermission()) {
|
||||
Log.i("Manage storage permission", "Permission already granted")
|
||||
result.success(true)
|
||||
} else {
|
||||
result.success(false)
|
||||
}
|
||||
}
|
||||
|
||||
"manageMediaPermission" -> requestManageMediaPermission(result)
|
||||
|
||||
else -> result.notImplemented()
|
||||
}
|
||||
}
|
||||
@@ -224,25 +243,47 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.R)
|
||||
private fun toggleTrash(contentUris: List<Uri>, isTrashed: Boolean, result: Result) {
|
||||
val activity = activityBinding?.activity
|
||||
val contentResolver = context?.contentResolver
|
||||
if (activity == null || contentResolver == null) {
|
||||
result.error("TrashError", "Activity or ContentResolver not available", null)
|
||||
return
|
||||
}
|
||||
private fun restoreFromTrashById(mediaId: String, type: Int, result: Result) {
|
||||
val id = mediaId.toLongOrNull()
|
||||
if (id == null) {
|
||||
result.error("INVALID_ID", "The file id is not a valid number: $mediaId", null)
|
||||
return
|
||||
}
|
||||
if (!isInTrash(id)) {
|
||||
result.error("TrashNotFound", "Item with id=$id not found in trash", null)
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
val pendingIntent = MediaStore.createTrashRequest(contentResolver, contentUris, isTrashed)
|
||||
pendingResult = result // Store for onActivityResult
|
||||
activity.startIntentSenderForResult(
|
||||
pendingIntent.intentSender,
|
||||
trashRequestCode,
|
||||
null, 0, 0, 0
|
||||
)
|
||||
} catch (e: Exception) {
|
||||
Log.e("TrashError", "Error creating or starting trash request", e)
|
||||
result.error("TrashError", "Error creating or starting trash request", null)
|
||||
val uri = ContentUris.withAppendedId(contentUriForType(type), id)
|
||||
|
||||
try {
|
||||
Log.i(TAG, "restoreFromTrashById: uri=$uri (type=$type,id=$id)")
|
||||
restoreUris(listOf(uri), result)
|
||||
} catch (e: Exception) {
|
||||
Log.w(TAG, "restoreFromTrashById failed", e)
|
||||
}
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.R)
|
||||
private fun toggleTrash(contentUris: List<Uri>, isTrashed: Boolean, result: Result) {
|
||||
val activity = activityBinding?.activity
|
||||
val contentResolver = context?.contentResolver
|
||||
if (activity == null || contentResolver == null) {
|
||||
result.error("TrashError", "Activity or ContentResolver not available", null)
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
val pendingIntent = MediaStore.createTrashRequest(contentResolver, contentUris, isTrashed)
|
||||
pendingResult = result // Store for onActivityResult
|
||||
activity.startIntentSenderForResult(
|
||||
pendingIntent.intentSender,
|
||||
trashRequestCode,
|
||||
null, 0, 0, 0
|
||||
)
|
||||
} catch (e: Exception) {
|
||||
Log.e("TrashError", "Error creating or starting trash request", e)
|
||||
result.error("TrashError", "Error creating or starting trash request", null)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -264,14 +305,7 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
contentResolver.query(queryUri, projection, queryArgs, null)?.use { cursor ->
|
||||
if (cursor.moveToFirst()) {
|
||||
val id = cursor.getLong(cursor.getColumnIndexOrThrow(MediaStore.Files.FileColumns._ID))
|
||||
// same order as AssetType from dart
|
||||
val contentUri = when (type) {
|
||||
1 -> MediaStore.Images.Media.EXTERNAL_CONTENT_URI
|
||||
2 -> MediaStore.Video.Media.EXTERNAL_CONTENT_URI
|
||||
3 -> MediaStore.Audio.Media.EXTERNAL_CONTENT_URI
|
||||
else -> queryUri
|
||||
}
|
||||
return ContentUris.withAppendedId(contentUri, id)
|
||||
return ContentUris.withAppendedId(contentUriForType(type), id)
|
||||
}
|
||||
}
|
||||
return null
|
||||
@@ -315,6 +349,40 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.R)
|
||||
private fun isInTrash(id: Long): Boolean {
|
||||
val contentResolver = context?.contentResolver ?: return false
|
||||
val filesUri = MediaStore.Files.getContentUri(MediaStore.VOLUME_EXTERNAL)
|
||||
val args = Bundle().apply {
|
||||
putString(ContentResolver.QUERY_ARG_SQL_SELECTION, "${MediaStore.Files.FileColumns._ID}=?")
|
||||
putStringArray(ContentResolver.QUERY_ARG_SQL_SELECTION_ARGS, arrayOf(id.toString()))
|
||||
putInt(MediaStore.QUERY_ARG_MATCH_TRASHED, MediaStore.MATCH_ONLY)
|
||||
putInt(ContentResolver.QUERY_ARG_LIMIT, 1)
|
||||
}
|
||||
return contentResolver.query(filesUri, arrayOf(MediaStore.Files.FileColumns._ID), args, null)
|
||||
?.use { it.moveToFirst() } == true
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.R)
|
||||
private fun restoreUris(uris: List<Uri>, result: Result) {
|
||||
if (uris.isEmpty()) {
|
||||
result.error("TrashError", "No URIs to restore", null)
|
||||
return
|
||||
}
|
||||
Log.i(TAG, "restoreUris: count=${uris.size}, first=${uris.first()}")
|
||||
toggleTrash(uris, false, result)
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.Q)
|
||||
private fun contentUriForType(type: Int): Uri =
|
||||
when (type) {
|
||||
// same order as AssetType from dart
|
||||
1 -> MediaStore.Images.Media.EXTERNAL_CONTENT_URI
|
||||
2 -> MediaStore.Video.Media.EXTERNAL_CONTENT_URI
|
||||
3 -> MediaStore.Audio.Media.EXTERNAL_CONTENT_URI
|
||||
else -> MediaStore.Files.getContentUri(MediaStore.VOLUME_EXTERNAL)
|
||||
}
|
||||
}
|
||||
|
||||
private const val TAG = "BackgroundServicePlugin"
|
||||
|
||||
@@ -305,6 +305,7 @@ interface NativeSyncApi {
|
||||
fun getAssetsForAlbum(albumId: String, updatedTimeCond: Long?): List<PlatformAsset>
|
||||
fun hashAssets(assetIds: List<String>, allowNetworkAccess: Boolean, callback: (Result<List<HashResult>>) -> Unit)
|
||||
fun cancelHashing()
|
||||
fun getTrashedAssets(): Map<String, List<PlatformAsset>>
|
||||
|
||||
companion object {
|
||||
/** The codec used by NativeSyncApi. */
|
||||
@@ -483,6 +484,21 @@ interface NativeSyncApi {
|
||||
channel.setMessageHandler(null)
|
||||
}
|
||||
}
|
||||
run {
|
||||
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets$separatedMessageChannelSuffix", codec, taskQueue)
|
||||
if (api != null) {
|
||||
channel.setMessageHandler { _, reply ->
|
||||
val wrapped: List<Any?> = try {
|
||||
listOf(api.getTrashedAssets())
|
||||
} catch (exception: Throwable) {
|
||||
MessagesPigeonUtils.wrapError(exception)
|
||||
}
|
||||
reply.reply(wrapped)
|
||||
}
|
||||
} else {
|
||||
channel.setMessageHandler(null)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,4 +21,9 @@ class NativeSyncApiImpl26(context: Context) : NativeSyncApiImplBase(context), Na
|
||||
override fun getMediaChanges(): SyncDelta {
|
||||
throw IllegalStateException("Method not supported on this Android version.")
|
||||
}
|
||||
|
||||
override fun getTrashedAssets(): Map<String, List<PlatformAsset>> {
|
||||
// Method not supported on this Android version.
|
||||
return emptyMap()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
package app.alextran.immich.sync

import android.content.ContentResolver
import android.content.Context
import android.os.Build
import android.os.Bundle
import android.provider.MediaStore
import androidx.annotation.RequiresApi
import androidx.annotation.RequiresExtension
@@ -86,4 +88,29 @@ class NativeSyncApiImpl30(context: Context) : NativeSyncApiImplBase(context), Na
    // Unmounted volumes are handled in dart when the album is removed
    return SyncDelta(hasChanges, changed, deleted, assetAlbums)
  }

  override fun getTrashedAssets(): Map<String, List<PlatformAsset>> {

    val result = LinkedHashMap<String, MutableList<PlatformAsset>>()
    val volumes = MediaStore.getExternalVolumeNames(ctx)

    for (volume in volumes) {

      val queryArgs = Bundle().apply {
        putString(ContentResolver.QUERY_ARG_SQL_SELECTION, MEDIA_SELECTION)
        putStringArray(ContentResolver.QUERY_ARG_SQL_SELECTION_ARGS, MEDIA_SELECTION_ARGS)
        putInt(MediaStore.QUERY_ARG_MATCH_TRASHED, MediaStore.MATCH_ONLY)
      }

      getCursor(volume, queryArgs).use { cursor ->
        getAssets(cursor).forEach { res ->
          if (res is AssetResult.ValidAsset) {
            result.getOrPut(res.albumId) { mutableListOf() }.add(res.asset)
          }
        }
      }
    }

    return result.mapValues { it.value.toList() }
  }
}

@@ -4,6 +4,8 @@ import android.annotation.SuppressLint
import android.content.ContentUris
import android.content.Context
import android.database.Cursor
import android.net.Uri
import android.os.Bundle
import android.provider.MediaStore
import android.util.Base64
import androidx.core.database.getStringOrNull
@@ -81,6 +83,16 @@ open class NativeSyncApiImplBase(context: Context) : ImmichPlugin() {
      sortOrder,
    )

  protected fun getCursor(
    volume: String,
    queryArgs: Bundle
  ): Cursor? = ctx.contentResolver.query(
    MediaStore.Files.getContentUri(volume),
    ASSET_PROJECTION,
    queryArgs,
    null
  )

  protected fun getAssets(cursor: Cursor?): Sequence<AssetResult> {
    return sequence {
      cursor?.use { c ->

@@ -35,8 +35,8 @@ platform :android do
      task: 'bundle',
      build_type: 'Release',
      properties: {
-       "android.injected.version.code" => 3024,
-       "android.injected.version.name" => "2.2.1",
+       "android.injected.version.code" => 3026,
+       "android.injected.version.name" => "2.2.3",
      }
    )
    upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

1 mobile/drift_schemas/main/drift_schema_v13.json generated Normal file
File diff suppressed because one or more lines are too long
@@ -32,6 +32,9 @@
|
||||
FEAFA8732E4D42F4001E47FE /* Thumbhash.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEAFA8722E4D42F4001E47FE /* Thumbhash.swift */; };
|
||||
FED3B1962E253E9B0030FD97 /* ThumbnailsImpl.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1942E253E9B0030FD97 /* ThumbnailsImpl.swift */; };
|
||||
FED3B1972E253E9B0030FD97 /* Thumbnails.g.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1932E253E9B0030FD97 /* Thumbnails.g.swift */; };
|
||||
FEE084F82EC172460045228E /* SQLiteData in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084F72EC172460045228E /* SQLiteData */; };
|
||||
FEE084FB2EC1725A0045228E /* RawStructuredFieldValues in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084FA2EC1725A0045228E /* RawStructuredFieldValues */; };
|
||||
FEE084FD2EC1725A0045228E /* StructuredFieldValues in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084FC2EC1725A0045228E /* StructuredFieldValues */; };
|
||||
/* End PBXBuildFile section */
|
||||
|
||||
/* Begin PBXContainerItemProxy section */
|
||||
@@ -133,15 +136,11 @@
|
||||
/* Begin PBXFileSystemSynchronizedRootGroup section */
|
||||
B231F52D2E93A44A00BC45D1 /* Core */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
exceptions = (
|
||||
);
|
||||
path = Core;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
B2CF7F8C2DDE4EBB00744BF6 /* Sync */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
exceptions = (
|
||||
);
|
||||
path = Sync;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
@@ -153,6 +152,11 @@
|
||||
path = WidgetExtension;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
FEE084F22EC172080045228E /* Schemas */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
path = Schemas;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
/* End PBXFileSystemSynchronizedRootGroup section */
|
||||
|
||||
/* Begin PBXFrameworksBuildPhase section */
|
||||
@@ -160,6 +164,9 @@
|
||||
isa = PBXFrameworksBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
FEE084F82EC172460045228E /* SQLiteData in Frameworks */,
|
||||
FEE084FB2EC1725A0045228E /* RawStructuredFieldValues in Frameworks */,
|
||||
FEE084FD2EC1725A0045228E /* StructuredFieldValues in Frameworks */,
|
||||
D218389C4A4C4693F141F7D1 /* Pods_Runner.framework in Frameworks */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
@@ -254,6 +261,7 @@
|
||||
97C146F01CF9000F007C117D /* Runner */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
FEE084F22EC172080045228E /* Schemas */,
|
||||
B231F52D2E93A44A00BC45D1 /* Core */,
|
||||
B25D37792E72CA15008B6CA7 /* Connectivity */,
|
||||
B21E34A62E5AF9760031FDB9 /* Background */,
|
||||
@@ -341,6 +349,7 @@
|
||||
fileSystemSynchronizedGroups = (
|
||||
B231F52D2E93A44A00BC45D1 /* Core */,
|
||||
B2CF7F8C2DDE4EBB00744BF6 /* Sync */,
|
||||
FEE084F22EC172080045228E /* Schemas */,
|
||||
);
|
||||
name = Runner;
|
||||
productName = Runner;
|
||||
@@ -419,6 +428,10 @@
|
||||
Base,
|
||||
);
|
||||
mainGroup = 97C146E51CF9000F007C117D;
|
||||
packageReferences = (
|
||||
FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */,
|
||||
FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */,
|
||||
);
|
||||
preferredProjectObjectVersion = 77;
|
||||
productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
|
||||
projectDirPath = "";
|
||||
@@ -530,10 +543,14 @@
|
||||
inputFileListPaths = (
|
||||
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist",
|
||||
);
|
||||
inputPaths = (
|
||||
);
|
||||
name = "[CP] Copy Pods Resources";
|
||||
outputFileListPaths = (
|
||||
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist",
|
||||
);
|
||||
outputPaths = (
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
shellPath = /bin/sh;
|
||||
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";
|
||||
@@ -562,10 +579,14 @@
|
||||
inputFileListPaths = (
|
||||
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
|
||||
);
|
||||
inputPaths = (
|
||||
);
|
||||
name = "[CP] Embed Pods Frameworks";
|
||||
outputFileListPaths = (
|
||||
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
|
||||
);
|
||||
outputPaths = (
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
shellPath = /bin/sh;
|
||||
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
|
||||
@@ -1201,6 +1222,43 @@
|
||||
defaultConfigurationName = Release;
|
||||
};
|
||||
/* End XCConfigurationList section */
|
||||
|
||||
/* Begin XCRemoteSwiftPackageReference section */
|
||||
FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */ = {
|
||||
isa = XCRemoteSwiftPackageReference;
|
||||
repositoryURL = "https://github.com/pointfreeco/sqlite-data";
|
||||
requirement = {
|
||||
kind = upToNextMajorVersion;
|
||||
minimumVersion = 1.3.0;
|
||||
};
|
||||
};
|
||||
FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */ = {
|
||||
isa = XCRemoteSwiftPackageReference;
|
||||
repositoryURL = "https://github.com/apple/swift-http-structured-headers.git";
|
||||
requirement = {
|
||||
kind = upToNextMajorVersion;
|
||||
minimumVersion = 1.5.0;
|
||||
};
|
||||
};
|
||||
/* End XCRemoteSwiftPackageReference section */
|
||||
|
||||
/* Begin XCSwiftPackageProductDependency section */
|
||||
FEE084F72EC172460045228E /* SQLiteData */ = {
|
||||
isa = XCSwiftPackageProductDependency;
|
||||
package = FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */;
|
||||
productName = SQLiteData;
|
||||
};
|
||||
FEE084FA2EC1725A0045228E /* RawStructuredFieldValues */ = {
|
||||
isa = XCSwiftPackageProductDependency;
|
||||
package = FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */;
|
||||
productName = RawStructuredFieldValues;
|
||||
};
|
||||
FEE084FC2EC1725A0045228E /* StructuredFieldValues */ = {
|
||||
isa = XCSwiftPackageProductDependency;
|
||||
package = FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */;
|
||||
productName = StructuredFieldValues;
|
||||
};
|
||||
/* End XCSwiftPackageProductDependency section */
|
||||
};
|
||||
rootObject = 97C146E61CF9000F007C117D /* Project object */;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
{
|
||||
"originHash" : "9be33bfaa68721646604aefff3cabbdaf9a193da192aae024c265065671f6c49",
|
||||
"pins" : [
|
||||
{
|
||||
"identity" : "combine-schedulers",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/combine-schedulers",
|
||||
"state" : {
|
||||
"revision" : "5928286acce13def418ec36d05a001a9641086f2",
|
||||
"version" : "1.0.3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "grdb.swift",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/groue/GRDB.swift",
|
||||
"state" : {
|
||||
"revision" : "18497b68fdbb3a09528d260a0a0e1e7e61c8c53d",
|
||||
"version" : "7.8.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "sqlite-data",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/sqlite-data",
|
||||
"state" : {
|
||||
"revision" : "b66b894b9a5710f1072c8eb6448a7edfc2d743d9",
|
||||
"version" : "1.3.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-clocks",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-clocks",
|
||||
"state" : {
|
||||
"revision" : "cc46202b53476d64e824e0b6612da09d84ffde8e",
|
||||
"version" : "1.0.6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-collections",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/apple/swift-collections",
|
||||
"state" : {
|
||||
"revision" : "7b847a3b7008b2dc2f47ca3110d8c782fb2e5c7e",
|
||||
"version" : "1.3.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-concurrency-extras",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-concurrency-extras",
|
||||
"state" : {
|
||||
"revision" : "5a3825302b1a0d744183200915a47b508c828e6f",
|
||||
"version" : "1.3.2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-custom-dump",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-custom-dump",
|
||||
"state" : {
|
||||
"revision" : "82645ec760917961cfa08c9c0c7104a57a0fa4b1",
|
||||
"version" : "1.3.3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-dependencies",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-dependencies",
|
||||
"state" : {
|
||||
"revision" : "a10f9feeb214bc72b5337b6ef6d5a029360db4cc",
|
||||
"version" : "1.10.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-http-structured-headers",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/apple/swift-http-structured-headers.git",
|
||||
"state" : {
|
||||
"revision" : "a9f3c352f4d46afd155e00b3c6e85decae6bcbeb",
|
||||
"version" : "1.5.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-identified-collections",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-identified-collections",
|
||||
"state" : {
|
||||
"revision" : "322d9ffeeba85c9f7c4984b39422ec7cc3c56597",
|
||||
"version" : "1.1.1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-perception",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-perception",
|
||||
"state" : {
|
||||
"revision" : "4f47ebafed5f0b0172cf5c661454fa8e28fb2ac4",
|
||||
"version" : "2.0.9"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-sharing",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-sharing",
|
||||
"state" : {
|
||||
"revision" : "3bfc408cc2d0bee2287c174da6b1c76768377818",
|
||||
"version" : "2.7.4"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-snapshot-testing",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-snapshot-testing",
|
||||
"state" : {
|
||||
"revision" : "a8b7c5e0ed33d8ab8887d1654d9b59f2cbad529b",
|
||||
"version" : "1.18.7"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-structured-queries",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-structured-queries",
|
||||
"state" : {
|
||||
"revision" : "1447ea20550f6f02c4b48cc80931c3ed40a9c756",
|
||||
"version" : "0.25.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-syntax",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/swiftlang/swift-syntax",
|
||||
"state" : {
|
||||
"revision" : "4799286537280063c85a32f09884cfbca301b1a1",
|
||||
"version" : "602.0.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "xctest-dynamic-overlay",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/xctest-dynamic-overlay",
|
||||
"state" : {
|
||||
"revision" : "4c27acf5394b645b70d8ba19dc249c0472d5f618",
|
||||
"version" : "1.7.0"
|
||||
}
|
||||
}
|
||||
],
|
||||
"version" : 3
|
||||
}
|
||||
177 mobile/ios/Runner/Schemas/Constants.swift Normal file
@@ -0,0 +1,177 @@
|
||||
import SQLiteData
|
||||
|
||||
struct Endpoint: Codable {
|
||||
let url: URL
|
||||
let status: Status
|
||||
|
||||
enum Status: String, Codable {
|
||||
case loading, valid, error, unknown
|
||||
}
|
||||
}
|
||||
|
||||
enum StoreKey: Int, CaseIterable, QueryBindable {
|
||||
// MARK: - Int
|
||||
case _version = 0
|
||||
static let version = Typed<Int>(rawValue: ._version)
|
||||
case _deviceIdHash = 3
|
||||
static let deviceIdHash = Typed<Int>(rawValue: ._deviceIdHash)
|
||||
case _backupTriggerDelay = 8
|
||||
static let backupTriggerDelay = Typed<Int>(rawValue: ._backupTriggerDelay)
|
||||
case _tilesPerRow = 103
|
||||
static let tilesPerRow = Typed<Int>(rawValue: ._tilesPerRow)
|
||||
case _groupAssetsBy = 105
|
||||
static let groupAssetsBy = Typed<Int>(rawValue: ._groupAssetsBy)
|
||||
case _uploadErrorNotificationGracePeriod = 106
|
||||
static let uploadErrorNotificationGracePeriod = Typed<Int>(rawValue: ._uploadErrorNotificationGracePeriod)
|
||||
case _thumbnailCacheSize = 110
|
||||
static let thumbnailCacheSize = Typed<Int>(rawValue: ._thumbnailCacheSize)
|
||||
case _imageCacheSize = 111
|
||||
static let imageCacheSize = Typed<Int>(rawValue: ._imageCacheSize)
|
||||
case _albumThumbnailCacheSize = 112
|
||||
static let albumThumbnailCacheSize = Typed<Int>(rawValue: ._albumThumbnailCacheSize)
|
||||
case _selectedAlbumSortOrder = 113
|
||||
static let selectedAlbumSortOrder = Typed<Int>(rawValue: ._selectedAlbumSortOrder)
|
||||
case _logLevel = 115
|
||||
static let logLevel = Typed<Int>(rawValue: ._logLevel)
|
||||
case _mapRelativeDate = 119
|
||||
static let mapRelativeDate = Typed<Int>(rawValue: ._mapRelativeDate)
|
||||
case _mapThemeMode = 124
|
||||
static let mapThemeMode = Typed<Int>(rawValue: ._mapThemeMode)
|
||||
|
||||
// MARK: - String
|
||||
case _assetETag = 1
|
||||
static let assetETag = Typed<String>(rawValue: ._assetETag)
|
||||
case _currentUser = 2
|
||||
static let currentUser = Typed<String>(rawValue: ._currentUser)
|
||||
case _deviceId = 4
|
||||
static let deviceId = Typed<String>(rawValue: ._deviceId)
|
||||
case _accessToken = 11
|
||||
static let accessToken = Typed<String>(rawValue: ._accessToken)
|
||||
case _serverEndpoint = 12
|
||||
static let serverEndpoint = Typed<String>(rawValue: ._serverEndpoint)
|
||||
case _sslClientCertData = 15
|
||||
static let sslClientCertData = Typed<String>(rawValue: ._sslClientCertData)
|
||||
case _sslClientPasswd = 16
|
||||
static let sslClientPasswd = Typed<String>(rawValue: ._sslClientPasswd)
|
||||
case _themeMode = 102
|
||||
static let themeMode = Typed<String>(rawValue: ._themeMode)
|
||||
case _customHeaders = 127
|
||||
static let customHeaders = Typed<[String: String]>(rawValue: ._customHeaders)
|
||||
case _primaryColor = 128
|
||||
static let primaryColor = Typed<String>(rawValue: ._primaryColor)
|
||||
case _preferredWifiName = 133
|
||||
static let preferredWifiName = Typed<String>(rawValue: ._preferredWifiName)
|
||||
|
||||
// MARK: - Endpoint
|
||||
case _externalEndpointList = 135
|
||||
static let externalEndpointList = Typed<[Endpoint]>(rawValue: ._externalEndpointList)
|
||||
|
||||
// MARK: - URL
|
||||
case _localEndpoint = 134
|
||||
static let localEndpoint = Typed<URL>(rawValue: ._localEndpoint)
|
||||
case _serverUrl = 10
|
||||
static let serverUrl = Typed<URL>(rawValue: ._serverUrl)
|
||||
|
||||
// MARK: - Date
|
||||
case _backupFailedSince = 5
|
||||
static let backupFailedSince = Typed<Date>(rawValue: ._backupFailedSince)
|
||||
|
||||
// MARK: - Bool
|
||||
case _backupRequireWifi = 6
|
||||
static let backupRequireWifi = Typed<Bool>(rawValue: ._backupRequireWifi)
|
||||
case _backupRequireCharging = 7
|
||||
static let backupRequireCharging = Typed<Bool>(rawValue: ._backupRequireCharging)
|
||||
case _autoBackup = 13
|
||||
static let autoBackup = Typed<Bool>(rawValue: ._autoBackup)
|
||||
case _backgroundBackup = 14
|
||||
static let backgroundBackup = Typed<Bool>(rawValue: ._backgroundBackup)
|
||||
case _loadPreview = 100
|
||||
static let loadPreview = Typed<Bool>(rawValue: ._loadPreview)
|
||||
case _loadOriginal = 101
|
||||
static let loadOriginal = Typed<Bool>(rawValue: ._loadOriginal)
|
||||
case _dynamicLayout = 104
|
||||
static let dynamicLayout = Typed<Bool>(rawValue: ._dynamicLayout)
|
||||
case _backgroundBackupTotalProgress = 107
|
||||
static let backgroundBackupTotalProgress = Typed<Bool>(rawValue: ._backgroundBackupTotalProgress)
|
||||
case _backgroundBackupSingleProgress = 108
|
||||
static let backgroundBackupSingleProgress = Typed<Bool>(rawValue: ._backgroundBackupSingleProgress)
|
||||
case _storageIndicator = 109
|
||||
static let storageIndicator = Typed<Bool>(rawValue: ._storageIndicator)
|
||||
case _advancedTroubleshooting = 114
|
||||
static let advancedTroubleshooting = Typed<Bool>(rawValue: ._advancedTroubleshooting)
|
||||
case _preferRemoteImage = 116
|
||||
static let preferRemoteImage = Typed<Bool>(rawValue: ._preferRemoteImage)
|
||||
case _loopVideo = 117
|
||||
static let loopVideo = Typed<Bool>(rawValue: ._loopVideo)
|
||||
case _mapShowFavoriteOnly = 118
|
||||
static let mapShowFavoriteOnly = Typed<Bool>(rawValue: ._mapShowFavoriteOnly)
|
||||
case _selfSignedCert = 120
|
||||
static let selfSignedCert = Typed<Bool>(rawValue: ._selfSignedCert)
|
||||
case _mapIncludeArchived = 121
|
||||
static let mapIncludeArchived = Typed<Bool>(rawValue: ._mapIncludeArchived)
|
||||
case _ignoreIcloudAssets = 122
|
||||
static let ignoreIcloudAssets = Typed<Bool>(rawValue: ._ignoreIcloudAssets)
|
||||
case _selectedAlbumSortReverse = 123
|
||||
static let selectedAlbumSortReverse = Typed<Bool>(rawValue: ._selectedAlbumSortReverse)
|
||||
case _mapwithPartners = 125
|
||||
static let mapwithPartners = Typed<Bool>(rawValue: ._mapwithPartners)
|
||||
case _enableHapticFeedback = 126
|
||||
static let enableHapticFeedback = Typed<Bool>(rawValue: ._enableHapticFeedback)
|
||||
case _dynamicTheme = 129
|
||||
static let dynamicTheme = Typed<Bool>(rawValue: ._dynamicTheme)
|
||||
case _colorfulInterface = 130
|
||||
static let colorfulInterface = Typed<Bool>(rawValue: ._colorfulInterface)
|
||||
case _syncAlbums = 131
|
||||
static let syncAlbums = Typed<Bool>(rawValue: ._syncAlbums)
|
||||
case _autoEndpointSwitching = 132
|
||||
static let autoEndpointSwitching = Typed<Bool>(rawValue: ._autoEndpointSwitching)
|
||||
case _loadOriginalVideo = 136
|
||||
static let loadOriginalVideo = Typed<Bool>(rawValue: ._loadOriginalVideo)
|
||||
case _manageLocalMediaAndroid = 137
|
||||
static let manageLocalMediaAndroid = Typed<Bool>(rawValue: ._manageLocalMediaAndroid)
|
||||
case _readonlyModeEnabled = 138
|
||||
static let readonlyModeEnabled = Typed<Bool>(rawValue: ._readonlyModeEnabled)
|
||||
case _autoPlayVideo = 139
|
||||
static let autoPlayVideo = Typed<Bool>(rawValue: ._autoPlayVideo)
|
||||
case _photoManagerCustomFilter = 1000
|
||||
static let photoManagerCustomFilter = Typed<Bool>(rawValue: ._photoManagerCustomFilter)
|
||||
case _betaPromptShown = 1001
|
||||
static let betaPromptShown = Typed<Bool>(rawValue: ._betaPromptShown)
|
||||
case _betaTimeline = 1002
|
||||
static let betaTimeline = Typed<Bool>(rawValue: ._betaTimeline)
|
||||
case _enableBackup = 1003
|
||||
static let enableBackup = Typed<Bool>(rawValue: ._enableBackup)
|
||||
case _useWifiForUploadVideos = 1004
|
||||
static let useWifiForUploadVideos = Typed<Bool>(rawValue: ._useWifiForUploadVideos)
|
||||
case _useWifiForUploadPhotos = 1005
|
||||
static let useWifiForUploadPhotos = Typed<Bool>(rawValue: ._useWifiForUploadPhotos)
|
||||
case _needBetaMigration = 1006
|
||||
static let needBetaMigration = Typed<Bool>(rawValue: ._needBetaMigration)
|
||||
case _shouldResetSync = 1007
|
||||
static let shouldResetSync = Typed<Bool>(rawValue: ._shouldResetSync)
|
||||
|
||||
struct Typed<T>: RawRepresentable {
|
||||
let rawValue: StoreKey
|
||||
|
||||
@_transparent
|
||||
init(rawValue value: StoreKey) {
|
||||
self.rawValue = value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum BackupSelection: Int, QueryBindable {
|
||||
case selected, none, excluded
|
||||
}
|
||||
|
||||
enum AvatarColor: Int, QueryBindable {
|
||||
case primary, pink, red, yellow, blue, green, purple, orange, gray, amber
|
||||
}
|
||||
|
||||
enum AlbumUserRole: Int, QueryBindable {
|
||||
case editor, viewer
|
||||
}
|
||||
|
||||
enum MemoryType: Int, QueryBindable {
|
||||
case onThisDay
|
||||
}
|
||||
146 mobile/ios/Runner/Schemas/Store.swift Normal file
@@ -0,0 +1,146 @@
|
||||
import SQLiteData
|
||||
|
||||
enum StoreError: Error {
|
||||
case invalidJSON(String)
|
||||
case invalidURL(String)
|
||||
case encodingFailed
|
||||
}
|
||||
|
||||
protocol StoreConvertible {
|
||||
associatedtype StorageType
|
||||
static func fromValue(_ value: StorageType) throws(StoreError) -> Self
|
||||
static func toValue(_ value: Self) throws(StoreError) -> StorageType
|
||||
}
|
||||
|
||||
extension Int: StoreConvertible {
|
||||
static func fromValue(_ value: Int) -> Int { value }
|
||||
static func toValue(_ value: Int) -> Int { value }
|
||||
}
|
||||
|
||||
extension Bool: StoreConvertible {
|
||||
static func fromValue(_ value: Int) -> Bool { value == 1 }
|
||||
static func toValue(_ value: Bool) -> Int { value ? 1 : 0 }
|
||||
}
|
||||
|
||||
extension Date: StoreConvertible {
|
||||
static func fromValue(_ value: Int) -> Date { Date(timeIntervalSince1970: TimeInterval(value) / 1000) }
|
||||
static func toValue(_ value: Date) -> Int { Int(value.timeIntervalSince1970 * 1000) }
|
||||
}
|
||||
|
||||
extension String: StoreConvertible {
|
||||
static func fromValue(_ value: String) -> String { value }
|
||||
static func toValue(_ value: String) -> String { value }
|
||||
}
|
||||
|
||||
extension URL: StoreConvertible {
|
||||
static func fromValue(_ value: String) throws(StoreError) -> URL {
|
||||
guard let url = URL(string: value) else {
|
||||
throw StoreError.invalidURL(value)
|
||||
}
|
||||
return url
|
||||
}
|
||||
static func toValue(_ value: URL) -> String { value.absoluteString }
|
||||
}
|
||||
|
||||
extension StoreConvertible where Self: Codable, StorageType == String {
|
||||
static var jsonDecoder: JSONDecoder { JSONDecoder() }
|
||||
static var jsonEncoder: JSONEncoder { JSONEncoder() }
|
||||
|
||||
static func fromValue(_ value: String) throws(StoreError) -> Self {
|
||||
do {
|
||||
return try jsonDecoder.decode(Self.self, from: Data(value.utf8))
|
||||
} catch {
|
||||
throw StoreError.invalidJSON(value)
|
||||
}
|
||||
}
|
||||
|
||||
static func toValue(_ value: Self) throws(StoreError) -> String {
|
||||
let encoded: Data
|
||||
do {
|
||||
encoded = try jsonEncoder.encode(value)
|
||||
} catch {
|
||||
throw StoreError.encodingFailed
|
||||
}
|
||||
|
||||
guard let string = String(data: encoded, encoding: .utf8) else {
|
||||
throw StoreError.encodingFailed
|
||||
}
|
||||
return string
|
||||
}
|
||||
}
|
||||
|
||||
extension Array: StoreConvertible where Element: Codable {
|
||||
typealias StorageType = String
|
||||
}
|
||||
|
||||
extension Dictionary: StoreConvertible where Key == String, Value: Codable {
|
||||
typealias StorageType = String
|
||||
}
|
||||
|
||||
class StoreRepository {
|
||||
private let db: DatabasePool
|
||||
|
||||
init(db: DatabasePool) {
|
||||
self.db = db
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) throws -> T? where T.StorageType == Int {
|
||||
let query = Store.select(\.intValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) throws -> T? where T.StorageType == String {
|
||||
let query = Store.select(\.stringValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) async throws -> T? where T.StorageType == Int {
|
||||
let query = Store.select(\.intValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try await db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) async throws -> T? where T.StorageType == String {
|
||||
let query = Store.select(\.stringValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try await db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) throws where T.StorageType == Int {
|
||||
let value = try T.toValue(value)
|
||||
try db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: value) }.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) throws where T.StorageType == String {
|
||||
let value = try T.toValue(value)
|
||||
try db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: value, intValue: nil) }.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) async throws where T.StorageType == Int {
|
||||
let value = try T.toValue(value)
|
||||
try await db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: value) }.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) async throws where T.StorageType == String {
|
||||
let value = try T.toValue(value)
|
||||
try await db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: value, intValue: nil) }.execute(conn)
|
||||
}
|
||||
}
|
||||
}
|
||||
237 mobile/ios/Runner/Schemas/Tables.swift Normal file
@@ -0,0 +1,237 @@
|
||||
import GRDB
|
||||
import SQLiteData
|
||||
|
||||
@Table("asset_face_entity")
|
||||
struct AssetFace {
|
||||
let id: String
|
||||
let assetId: String
|
||||
let personId: String?
|
||||
let imageWidth: Int
|
||||
let imageHeight: Int
|
||||
let boundingBoxX1: Int
|
||||
let boundingBoxY1: Int
|
||||
let boundingBoxX2: Int
|
||||
let boundingBoxY2: Int
|
||||
let sourceType: String
|
||||
}
|
||||
|
||||
@Table("auth_user_entity")
|
||||
struct AuthUser {
|
||||
let id: String
|
||||
let name: String
|
||||
let email: String
|
||||
let isAdmin: Bool
|
||||
let hasProfileImage: Bool
|
||||
let profileChangedAt: Date
|
||||
let avatarColor: AvatarColor
|
||||
let quotaSizeInBytes: Int
|
||||
let quotaUsageInBytes: Int
|
||||
let pinCode: String?
|
||||
}
|
||||
|
||||
@Table("local_album_entity")
|
||||
struct LocalAlbum {
|
||||
let id: String
|
||||
let backupSelection: BackupSelection
|
||||
let linkedRemoteAlbumId: String?
|
||||
let marker_: Bool?
|
||||
let name: String
|
||||
let isIosSharedAlbum: Bool
|
||||
let updatedAt: Date
|
||||
}
|
||||
|
||||
@Table("local_album_asset_entity")
|
||||
struct LocalAlbumAsset {
|
||||
let id: ID
|
||||
let marker_: String?
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let assetId: String
|
||||
let albumId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("local_asset_entity")
|
||||
struct LocalAsset {
|
||||
let id: String
|
||||
let checksum: String?
|
||||
let createdAt: Date
|
||||
let durationInSeconds: Int?
|
||||
let height: Int?
|
||||
let isFavorite: Bool
|
||||
let name: String
|
||||
let orientation: String
|
||||
let type: Int
|
||||
let updatedAt: Date
|
||||
let width: Int?
|
||||
}
|
||||
|
||||
@Table("memory_asset_entity")
|
||||
struct MemoryAsset {
|
||||
let id: ID
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let assetId: String
|
||||
let albumId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("memory_entity")
|
||||
struct Memory {
|
||||
let id: String
|
||||
let createdAt: Date
|
||||
let updatedAt: Date
|
||||
let deletedAt: Date?
|
||||
let ownerId: String
|
||||
let type: MemoryType
|
||||
let data: String
|
||||
let isSaved: Bool
|
||||
let memoryAt: Date
|
||||
let seenAt: Date?
|
||||
let showAt: Date?
|
||||
let hideAt: Date?
|
||||
}
|
||||
|
||||
@Table("partner_entity")
|
||||
struct Partner {
|
||||
let id: ID
|
||||
let inTimeline: Bool
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let sharedById: String
|
||||
let sharedWithId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("person_entity")
|
||||
struct Person {
|
||||
let id: String
|
||||
let createdAt: Date
|
||||
let updatedAt: Date
|
||||
let ownerId: String
|
||||
let name: String
|
||||
let faceAssetId: String?
|
||||
let isFavorite: Bool
|
||||
let isHidden: Bool
|
||||
let color: String?
|
||||
let birthDate: Date?
|
||||
}
|
||||
|
||||
@Table("remote_album_entity")
|
||||
struct RemoteAlbum {
|
||||
let id: String
|
||||
let createdAt: Date
|
||||
let description: String?
|
||||
let isActivityEnabled: Bool
|
||||
let name: String
|
||||
let order: Int
|
||||
let ownerId: String
|
||||
let thumbnailAssetId: String?
|
||||
let updatedAt: Date
|
||||
}
|
||||
|
||||
@Table("remote_album_asset_entity")
|
||||
struct RemoteAlbumAsset {
|
||||
let id: ID
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let assetId: String
|
||||
let albumId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("remote_album_user_entity")
|
||||
struct RemoteAlbumUser {
|
||||
let id: ID
|
||||
let role: AlbumUserRole
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let albumId: String
|
||||
let userId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("remote_asset_entity")
|
||||
struct RemoteAsset {
|
||||
let id: String
|
||||
let checksum: String?
|
||||
let deletedAt: Date?
|
||||
let isFavorite: Int
|
||||
let libraryId: String?
|
||||
let livePhotoVideoId: String?
|
||||
let localDateTime: Date?
|
||||
let orientation: String
|
||||
let ownerId: String
|
||||
let stackId: String?
|
||||
let visibility: Int
|
||||
}
|
||||
|
||||
@Table("remote_exif_entity")
|
||||
struct RemoteExif {
|
||||
@Column(primaryKey: true)
|
||||
let assetId: String
|
||||
let city: String?
|
||||
let state: String?
|
||||
let country: String?
|
||||
let dateTimeOriginal: Date?
|
||||
let description: String?
|
||||
let height: Int?
|
||||
let width: Int?
|
||||
let exposureTime: String?
|
||||
let fNumber: Double?
|
||||
let fileSize: Int?
|
||||
let focalLength: Double?
|
||||
let latitude: Double?
|
||||
let longitude: Double?
|
||||
let iso: Int?
|
||||
let make: String?
|
||||
let model: String?
|
||||
let lens: String?
|
||||
let orientation: String?
|
||||
let timeZone: String?
|
||||
let rating: Int?
|
||||
let projectionType: String?
|
||||
}
|
||||
|
||||
@Table("stack_entity")
|
||||
struct Stack {
|
||||
let id: String
|
||||
let createdAt: Date
|
||||
let updatedAt: Date
|
||||
let ownerId: String
|
||||
let primaryAssetId: String
|
||||
}
|
||||
|
||||
@Table("store_entity")
|
||||
struct Store {
|
||||
let id: StoreKey
|
||||
let stringValue: String?
|
||||
let intValue: Int?
|
||||
}
|
||||
|
||||
@Table("user_entity")
|
||||
struct User {
|
||||
let id: String
|
||||
let name: String
|
||||
let email: String
|
||||
let hasProfileImage: Bool
|
||||
let profileChangedAt: Date
|
||||
let avatarColor: AvatarColor
|
||||
}
|
||||
|
||||
@Table("user_metadata_entity")
|
||||
struct UserMetadata {
|
||||
let id: ID
|
||||
let value: Data
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let userId: String
|
||||
let key: Date
|
||||
}
|
||||
}
|
||||
@@ -364,6 +364,7 @@ protocol NativeSyncApi {
  func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset]
  func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void)
  func cancelHashing() throws
  func getTrashedAssets() throws -> [String: [PlatformAsset]]
}

/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
@@ -532,5 +533,20 @@ class NativeSyncApiSetup {
    } else {
      cancelHashingChannel.setMessageHandler(nil)
    }
    let getTrashedAssetsChannel = taskQueue == nil
      ? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
      : FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
    if let api = api {
      getTrashedAssetsChannel.setMessageHandler { _, reply in
        do {
          let result = try api.getTrashedAssets()
          reply(wrapResult(result))
        } catch {
          reply(wrapError(error))
        }
      }
    } else {
      getTrashedAssetsChannel.setMessageHandler(nil)
    }
  }
}

@@ -3,15 +3,15 @@ import CryptoKit
|
||||
|
||||
struct AssetWrapper: Hashable, Equatable {
|
||||
let asset: PlatformAsset
|
||||
|
||||
|
||||
init(with asset: PlatformAsset) {
|
||||
self.asset = asset
|
||||
}
|
||||
|
||||
|
||||
func hash(into hasher: inout Hasher) {
|
||||
hasher.combine(self.asset.id)
|
||||
}
|
||||
|
||||
|
||||
static func == (lhs: AssetWrapper, rhs: AssetWrapper) -> Bool {
|
||||
return lhs.asset.id == rhs.asset.id
|
||||
}
|
||||
@@ -19,31 +19,31 @@ struct AssetWrapper: Hashable, Equatable {
|
||||
|
||||
class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
static let name = "NativeSyncApi"
|
||||
|
||||
|
||||
static func register(with registrar: any FlutterPluginRegistrar) {
|
||||
let instance = NativeSyncApiImpl()
|
||||
NativeSyncApiSetup.setUp(binaryMessenger: registrar.messenger(), api: instance)
|
||||
registrar.publish(instance)
|
||||
}
|
||||
|
||||
|
||||
func detachFromEngine(for registrar: any FlutterPluginRegistrar) {
|
||||
super.detachFromEngine()
|
||||
}
|
||||
|
||||
|
||||
private let defaults: UserDefaults
|
||||
private let changeTokenKey = "immich:changeToken"
|
||||
private let albumTypes: [PHAssetCollectionType] = [.album, .smartAlbum]
|
||||
private let recoveredAlbumSubType = 1000000219
|
||||
|
||||
|
||||
private var hashTask: Task<Void?, Error>?
|
||||
private static let hashCancelledCode = "HASH_CANCELLED"
|
||||
private static let hashCancelled = Result<[HashResult], Error>.failure(PigeonError(code: hashCancelledCode, message: "Hashing cancelled", details: nil))
|
||||
|
||||
|
||||
|
||||
|
||||
init(with defaults: UserDefaults = .standard) {
|
||||
self.defaults = defaults
|
||||
}
|
||||
|
||||
|
||||
@available(iOS 16, *)
|
||||
private func getChangeToken() -> PHPersistentChangeToken? {
|
||||
guard let data = defaults.data(forKey: changeTokenKey) else {
|
||||
@@ -51,7 +51,7 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
}
|
||||
return try? NSKeyedUnarchiver.unarchivedObject(ofClass: PHPersistentChangeToken.self, from: data)
|
||||
}
|
||||
|
||||
|
||||
@available(iOS 16, *)
|
||||
private func saveChangeToken(token: PHPersistentChangeToken) -> Void {
|
||||
guard let data = try? NSKeyedArchiver.archivedData(withRootObject: token, requiringSecureCoding: true) else {
|
||||
@@ -59,18 +59,18 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
}
|
||||
defaults.set(data, forKey: changeTokenKey)
|
||||
}
|
||||
|
||||
|
||||
func clearSyncCheckpoint() -> Void {
|
||||
defaults.removeObject(forKey: changeTokenKey)
|
||||
}
|
||||
|
||||
|
||||
func checkpointSync() {
|
||||
guard #available(iOS 16, *) else {
|
||||
return
|
||||
}
|
||||
saveChangeToken(token: PHPhotoLibrary.shared().currentChangeToken)
|
||||
}
|
||||
|
||||
|
||||
func shouldFullSync() -> Bool {
|
||||
guard #available(iOS 16, *),
|
||||
PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized,
|
||||
@@ -78,36 +78,36 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
// When we do not have access to photo library, older iOS version or No token available, fallback to full sync
|
||||
return true
|
||||
}
|
||||
|
||||
|
||||
guard let _ = try? PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken) else {
|
||||
// Cannot fetch persistent changes
|
||||
return true
|
||||
}
|
||||
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
|
||||
func getAlbums() throws -> [PlatformAlbum] {
|
||||
var albums: [PlatformAlbum] = []
|
||||
|
||||
|
||||
albumTypes.forEach { type in
|
||||
let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
|
||||
for i in 0..<collections.count {
|
||||
let album = collections.object(at: i)
|
||||
|
||||
|
||||
// Ignore recovered album
|
||||
if(album.assetCollectionSubtype.rawValue == self.recoveredAlbumSubType) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
let options = PHFetchOptions()
|
||||
options.sortDescriptors = [NSSortDescriptor(key: "modificationDate", ascending: false)]
|
||||
options.includeHiddenAssets = false
|
||||
|
||||
|
||||
let assets = getAssetsFromAlbum(in: album, options: options)
|
||||
|
||||
|
||||
let isCloud = album.assetCollectionSubtype == .albumCloudShared || album.assetCollectionSubtype == .albumMyPhotoStream
|
||||
|
||||
|
||||
var domainAlbum = PlatformAlbum(
|
||||
id: album.localIdentifier,
|
||||
name: album.localizedTitle!,
|
||||
@@ -115,57 +115,57 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
isCloud: isCloud,
|
||||
assetCount: Int64(assets.count)
|
||||
)
|
||||
|
||||
|
||||
if let firstAsset = assets.firstObject {
|
||||
domainAlbum.updatedAt = firstAsset.modificationDate.map { Int64($0.timeIntervalSince1970) }
|
||||
}
|
||||
|
||||
|
||||
albums.append(domainAlbum)
|
||||
}
|
||||
}
|
||||
return albums.sorted { $0.id < $1.id }
|
||||
}
|
||||
|
||||
|
||||
func getMediaChanges() throws -> SyncDelta {
|
||||
guard #available(iOS 16, *) else {
|
||||
throw PigeonError(code: "UNSUPPORTED_OS", message: "This feature requires iOS 16 or later.", details: nil)
|
||||
}
|
||||
|
||||
|
||||
guard PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized else {
|
||||
throw PigeonError(code: "NO_AUTH", message: "No photo library access", details: nil)
|
||||
}
|
||||
|
||||
|
||||
guard let storedToken = getChangeToken() else {
|
||||
// No token exists, definitely need a full sync
|
||||
print("MediaManager::getMediaChanges: No token found")
|
||||
throw PigeonError(code: "NO_TOKEN", message: "No stored change token", details: nil)
|
||||
}
|
||||
|
||||
|
||||
let currentToken = PHPhotoLibrary.shared().currentChangeToken
|
||||
if storedToken == currentToken {
|
||||
return SyncDelta(hasChanges: false, updates: [], deletes: [], assetAlbums: [:])
|
||||
}
|
||||
|
||||
|
||||
do {
|
||||
let changes = try PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken)
|
||||
|
||||
|
||||
var updatedAssets: Set<AssetWrapper> = []
|
||||
var deletedAssets: Set<String> = []
|
||||
|
||||
|
||||
for change in changes {
|
||||
guard let details = try? change.changeDetails(for: PHObjectType.asset) else { continue }
|
||||
|
||||
|
||||
let updated = details.updatedLocalIdentifiers.union(details.insertedLocalIdentifiers)
|
||||
deletedAssets.formUnion(details.deletedLocalIdentifiers)
|
||||
|
||||
|
||||
if (updated.isEmpty) { continue }
|
||||
|
||||
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
let result = PHAsset.fetchAssets(withLocalIdentifiers: Array(updated), options: options)
|
||||
for i in 0..<result.count {
|
||||
let asset = result.object(at: i)
|
||||
|
||||
|
||||
// Asset wrapper only uses the id for comparison. Multiple change can contain the same asset, skip duplicate changes
|
||||
let predicate = PlatformAsset(
|
||||
id: asset.localIdentifier,
|
||||
@@ -178,25 +178,25 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
if (updatedAssets.contains(AssetWrapper(with: predicate))) {
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
let domainAsset = AssetWrapper(with: asset.toPlatformAsset())
|
||||
updatedAssets.insert(domainAsset)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
let updates = Array(updatedAssets.map { $0.asset })
|
||||
return SyncDelta(hasChanges: true, updates: updates, deletes: Array(deletedAssets), assetAlbums: buildAssetAlbumsMap(assets: updates))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
private func buildAssetAlbumsMap(assets: Array<PlatformAsset>) -> [String: [String]] {
|
||||
guard !assets.isEmpty else {
|
||||
return [:]
|
||||
}
|
||||
|
||||
|
||||
var albumAssets: [String: [String]] = [:]
|
||||
|
||||
|
||||
for type in albumTypes {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
|
||||
collections.enumerateObjects { (album, _, _) in
|
||||
@@ -211,13 +211,13 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
}
|
||||
return albumAssets
|
||||
}
|
||||
|
||||
|
||||
func getAssetIdsForAlbum(albumId: String) throws -> [String] {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
|
||||
guard let album = collections.firstObject else {
|
||||
return []
|
||||
}
|
||||
|
||||
|
||||
var ids: [String] = []
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
@@ -227,13 +227,13 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
|
||||
func getAssetsCountSince(albumId: String, timestamp: Int64) throws -> Int64 {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
|
||||
guard let album = collections.firstObject else {
|
||||
return 0
|
||||
}
|
||||
|
||||
|
||||
let date = NSDate(timeIntervalSince1970: TimeInterval(timestamp))
|
||||
let options = PHFetchOptions()
|
||||
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
|
||||
@@ -241,32 +241,32 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
let assets = getAssetsFromAlbum(in: album, options: options)
|
||||
return Int64(assets.count)
|
||||
}
|
||||
|
||||
|
||||
func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset] {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
|
||||
guard let album = collections.firstObject else {
|
||||
return []
|
||||
}
|
||||
|
||||
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
if(updatedTimeCond != nil) {
|
||||
let date = NSDate(timeIntervalSince1970: TimeInterval(updatedTimeCond!))
|
||||
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
|
||||
}
|
||||
|
||||
|
||||
let result = getAssetsFromAlbum(in: album, options: options)
|
||||
if(result.count == 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
|
||||
var assets: [PlatformAsset] = []
|
||||
result.enumerateObjects { (asset, _, _) in
|
||||
assets.append(asset.toPlatformAsset())
|
||||
}
|
||||
return assets
|
||||
}
|
||||
|
||||
|
||||
func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void) {
|
||||
if let prevTask = hashTask {
|
||||
prevTask.cancel()
|
||||
@@ -284,11 +284,11 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
missingAssetIds.remove(asset.localIdentifier)
|
||||
assets.append(asset)
|
||||
}
|
||||
|
||||
|
||||
if Task.isCancelled {
|
||||
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
|
||||
}
|
||||
|
||||
|
||||
await withTaskGroup(of: HashResult?.self) { taskGroup in
|
||||
var results = [HashResult]()
|
||||
results.reserveCapacity(assets.count)
|
||||
@@ -301,28 +301,28 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
return await self.hashAsset(asset, allowNetworkAccess: allowNetworkAccess)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
for await result in taskGroup {
|
||||
guard let result = result else {
|
||||
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
|
||||
}
|
||||
results.append(result)
|
||||
}
|
||||
|
||||
|
||||
for missing in missingAssetIds {
|
||||
results.append(HashResult(assetId: missing, error: "Asset not found in library", hash: nil))
|
||||
}
|
||||
|
||||
|
||||
return self?.completeWhenActive(for: completion, with: .success(results))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
func cancelHashing() {
|
||||
hashTask?.cancel()
|
||||
hashTask = nil
|
||||
}
|
||||
|
||||
|
||||
private func hashAsset(_ asset: PHAsset, allowNetworkAccess: Bool) async -> HashResult? {
|
||||
class RequestRef {
|
||||
var id: PHAssetResourceDataRequestID?
|
||||
@@ -332,21 +332,21 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
if Task.isCancelled {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
guard let resource = asset.getResource() else {
|
||||
return HashResult(assetId: asset.localIdentifier, error: "Cannot get asset resource", hash: nil)
|
||||
}
|
||||
|
||||
|
||||
if Task.isCancelled {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
let options = PHAssetResourceRequestOptions()
|
||||
options.isNetworkAccessAllowed = allowNetworkAccess
|
||||
|
||||
|
||||
return await withCheckedContinuation { continuation in
|
||||
var hasher = Insecure.SHA1()
|
||||
|
||||
|
||||
requestRef.id = PHAssetResourceManager.default().requestData(
|
||||
for: resource,
|
||||
options: options,
|
||||
@@ -377,7 +377,11 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
PHAssetResourceManager.default().cancelDataRequest(requestId)
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
func getTrashedAssets() throws -> [String: [PlatformAsset]] {
|
||||
throw PigeonError(code: "UNSUPPORTED_OS", message: "This feature not supported on iOS.", details: nil)
|
||||
}
|
||||
|
||||
private func getAssetsFromAlbum(in album: PHAssetCollection, options: PHFetchOptions) -> PHFetchResult<PHAsset> {
|
||||
// Ensure to actually getting all assets for the Recents album
|
||||
if (album.assetCollectionSubtype == .smartAlbumUserLibrary) {
|
||||
|
||||
@@ -32,6 +32,17 @@ platform :ios do
    )
  end

  # Helper method to get version from pubspec.yaml
  def get_version_from_pubspec
    require 'yaml'

    pubspec_path = File.join(Dir.pwd, "../..", "pubspec.yaml")
    pubspec = YAML.load_file(pubspec_path)

    version_string = pubspec['version']
    version_string ? version_string.split('+').first : nil
  end

  # Helper method to configure code signing for all targets
  def configure_code_signing(bundle_id_suffix: "")
    bundle_suffix = bundle_id_suffix.empty? ? "" : ".#{bundle_id_suffix}"
@@ -158,7 +169,8 @@ platform :ios do
    # Build and upload with version number
    build_and_upload(
      api_key: api_key,
-     version_number: "2.1.0"
+     version_number: get_version_from_pubspec,
+     distribute_external: false,
    )
  end

@@ -168,8 +180,9 @@ platform :ios do
      path: "./Runner.xcodeproj",
      targets: ["Runner", "ShareExtension", "WidgetExtension"]
    )

    increment_version_number(
-     version_number: "2.2.1"
+     version_number: get_version_from_pubspec
    )
    increment_build_number(
      build_number: latest_testflight_build_number + 1,

@@ -58,3 +58,6 @@ const int kPhotoTabIndex = 0;
const int kSearchTabIndex = 1;
const int kAlbumTabIndex = 2;
const int kLibraryTabIndex = 3;

// Workaround for SQLite's variable limit (SQLITE_MAX_VARIABLE_NUMBER = 32766)
const int kDriftMaxChunk = 32000;

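Note on the constant above: kDriftMaxChunk exists because SQLite rejects statements with more than SQLITE_MAX_VARIABLE_NUMBER (32766) bind variables, so any query over an arbitrarily long id list has to be split into slices. A minimal Dart sketch of how such chunking can be applied; chunkForDrift and fetchByIds are illustrative names and are not part of this diff:

import 'dart:math' as math;

import 'package:immich_mobile/constants/constants.dart';

/// Splits [ids] into slices of at most [kDriftMaxChunk] entries so each
/// generated `WHERE id IN (...)` stays under SQLite's bind-variable limit.
Iterable<List<String>> chunkForDrift(List<String> ids) sync* {
  for (var i = 0; i < ids.length; i += kDriftMaxChunk) {
    yield ids.sublist(i, math.min(i + kDriftMaxChunk, ids.length));
  }
}

/// Hypothetical usage: run a per-slice query and merge the results.
Future<List<T>> fetchChunked<T>(
  List<String> ids,
  Future<List<T>> Function(List<String> slice) fetchByIds,
) async {
  final results = <T>[];
  for (final slice in chunkForDrift(ids)) {
    results.addAll(await fetchByIds(slice));
  }
  return results;
}
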
@@ -239,7 +239,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
        final networkCapabilities = await _ref?.read(connectivityApiProvider).getCapabilities() ?? [];
        return _ref
            ?.read(uploadServiceProvider)
-           .startBackupWithHttpClient(currentUser.id, networkCapabilities.hasWifi, _cancellationToken);
+           .startBackupWithHttpClient(currentUser.id, networkCapabilities.isUnmetered, _cancellationToken);
      },
      (error, stack) {
        dPrint(() => "Error in backup zone $error, $stack");

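The hunk above widens the backup gate from a Wi-Fi-only check to an unmetered-network check. The getters come from platform_extensions.dart, which is not part of this diff, so the sketch below only illustrates the assumed shape of such an extension; the enum and getter names are hypothetical:

// Hypothetical sketch of a capability-list extension; the real definitions in
// platform_extensions.dart may differ.
enum NetworkCapability { wifi, cellular, unmetered, vpn }

extension NetworkCapabilityListX on List<NetworkCapability> {
  bool get hasWifi => contains(NetworkCapability.wifi);

  // Unmetered also covers wired connections and hotspots the OS reports as
  // not metered, so it is a broader gate for large uploads than hasWifi.
  bool get isUnmetered => contains(NetworkCapability.unmetered);
}
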
@@ -2,8 +2,10 @@ import 'package:flutter/services.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:logging/logging.dart';

@@ -13,6 +15,7 @@ class HashService {
  final int _batchSize;
  final DriftLocalAlbumRepository _localAlbumRepository;
  final DriftLocalAssetRepository _localAssetRepository;
  final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
  final NativeSyncApi _nativeSyncApi;
  final bool Function()? _cancelChecker;
  final _log = Logger('HashService');
@@ -20,11 +23,13 @@ class HashService {
  HashService({
    required DriftLocalAlbumRepository localAlbumRepository,
    required DriftLocalAssetRepository localAssetRepository,
    required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
    required NativeSyncApi nativeSyncApi,
    bool Function()? cancelChecker,
    int? batchSize,
  }) : _localAlbumRepository = localAlbumRepository,
       _localAssetRepository = localAssetRepository,
       _trashedLocalAssetRepository = trashedLocalAssetRepository,
       _cancelChecker = cancelChecker,
       _nativeSyncApi = nativeSyncApi,
       _batchSize = batchSize ?? kBatchHashFileLimit;
@@ -49,6 +54,14 @@ class HashService {
          await _hashAssets(album, assetsToHash);
        }
      }
      if (CurrentPlatform.isAndroid && localAlbums.isNotEmpty) {
        final backupAlbumIds = localAlbums.map((e) => e.id);
        final trashedToHash = await _trashedLocalAssetRepository.getAssetsToHash(backupAlbumIds);
        if (trashedToHash.isNotEmpty) {
          final pseudoAlbum = LocalAlbum(id: '-pseudoAlbum', name: 'Trash', updatedAt: DateTime.now());
          await _hashAssets(pseudoAlbum, trashedToHash, isTrashed: true);
        }
      }
    } on PlatformException catch (e) {
      if (e.code == _kHashCancelledCode) {
        _log.warning("Hashing cancelled by platform");
@@ -65,7 +78,7 @@ class HashService {
  /// Processes a list of [LocalAsset]s, storing their hash and updating the assets in the DB
  /// with hash for those that were successfully hashed. Hashes are looked up in a table
  /// [LocalAssetHashEntity] by local id. Only missing entries are newly hashed and added to the DB.
- Future<void> _hashAssets(LocalAlbum album, List<LocalAsset> assetsToHash) async {
+ Future<void> _hashAssets(LocalAlbum album, List<LocalAsset> assetsToHash, {bool isTrashed = false}) async {
    final toHash = <String, LocalAsset>{};

    for (final asset in assetsToHash) {
@@ -76,16 +89,16 @@ class HashService {

      toHash[asset.id] = asset;
      if (toHash.length == _batchSize) {
-       await _processBatch(album, toHash);
+       await _processBatch(album, toHash, isTrashed);
        toHash.clear();
      }
    }

-   await _processBatch(album, toHash);
+   await _processBatch(album, toHash, isTrashed);
  }

  /// Processes a batch of assets.
- Future<void> _processBatch(LocalAlbum album, Map<String, LocalAsset> toHash) async {
+ Future<void> _processBatch(LocalAlbum album, Map<String, LocalAsset> toHash, bool isTrashed) async {
    if (toHash.isEmpty) {
      return;
    }
@@ -120,7 +133,10 @@ class HashService {
    }

    _log.fine("Hashed ${hashed.length}/${toHash.length} assets");

-   await _localAssetRepository.updateHashes(hashed);
+   if (isTrashed) {
+     await _trashedLocalAssetRepository.updateHashes(hashed);
+   } else {
+     await _localAssetRepository.updateHashes(hashed);
+   }
  }
}

@@ -4,9 +4,14 @@ import 'package:collection/collection.dart';
import 'package:flutter/foundation.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:immich_mobile/utils/datetime_helpers.dart';
import 'package:immich_mobile/utils/diff.dart';
import 'package:logging/logging.dart';
@@ -14,15 +19,34 @@ import 'package:logging/logging.dart';
class LocalSyncService {
final DriftLocalAlbumRepository _localAlbumRepository;
final NativeSyncApi _nativeSyncApi;
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
final LocalFilesManagerRepository _localFilesManager;
final StorageRepository _storageRepository;
final Logger _log = Logger("DeviceSyncService");

LocalSyncService({required DriftLocalAlbumRepository localAlbumRepository, required NativeSyncApi nativeSyncApi})
: _localAlbumRepository = localAlbumRepository,
_nativeSyncApi = nativeSyncApi;
LocalSyncService({
required DriftLocalAlbumRepository localAlbumRepository,
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
required LocalFilesManagerRepository localFilesManager,
required StorageRepository storageRepository,
required NativeSyncApi nativeSyncApi,
}) : _localAlbumRepository = localAlbumRepository,
_trashedLocalAssetRepository = trashedLocalAssetRepository,
_localFilesManager = localFilesManager,
_storageRepository = storageRepository,
_nativeSyncApi = nativeSyncApi;

Future<void> sync({bool full = false}) async {
final Stopwatch stopwatch = Stopwatch()..start();
try {
if (CurrentPlatform.isAndroid && Store.get(StoreKey.manageLocalMediaAndroid, false)) {
final hasPermission = await _localFilesManager.hasManageMediaPermission();
if (hasPermission) {
await _syncTrashedAssets();
} else {
_log.warning("syncTrashedAssets cannot proceed because MANAGE_MEDIA permission is missing");
}
}
if (full || await _nativeSyncApi.shouldFullSync()) {
_log.fine("Full sync request from ${full ? "user" : "native"}");
return await fullSync();
@@ -69,7 +93,6 @@ class LocalSyncService {
await updateAlbum(dbAlbum, album);
}
}

await _nativeSyncApi.checkpointSync();
} catch (e, s) {
_log.severe("Error performing device sync", e, s);
@@ -273,6 +296,48 @@ class LocalSyncService {
bool _albumsEqual(LocalAlbum a, LocalAlbum b) {
return a.name == b.name && a.assetCount == b.assetCount && a.updatedAt.isAtSameMomentAs(b.updatedAt);
}

Future<void> _syncTrashedAssets() async {
final trashedAssetMap = await _nativeSyncApi.getTrashedAssets();
await processTrashedAssets(trashedAssetMap);
}

@visibleForTesting
Future<void> processTrashedAssets(Map<String, List<PlatformAsset>> trashedAssetMap) async {
if (trashedAssetMap.isEmpty) {
_log.info("syncTrashedAssets, No trashed assets found");
}
final trashedAssets = trashedAssetMap.cast<String, List<Object?>>().entries.expand(
(entry) => entry.value.cast<PlatformAsset>().toTrashedAssets(entry.key),
);

_log.fine("syncTrashedAssets, trashedAssets: ${trashedAssets.map((e) => e.asset.id)}");
await _trashedLocalAssetRepository.processTrashSnapshot(trashedAssets);

final assetsToRestore = await _trashedLocalAssetRepository.getToRestore();
if (assetsToRestore.isNotEmpty) {
final restoredIds = await _localFilesManager.restoreAssetsFromTrash(assetsToRestore);
await _trashedLocalAssetRepository.applyRestoredAssets(restoredIds);
} else {
_log.info("syncTrashedAssets, No remote assets found for restoration");
}

final localAssetsToTrash = await _trashedLocalAssetRepository.getToTrash();
if (localAssetsToTrash.isNotEmpty) {
final mediaUrls = await Future.wait(
localAssetsToTrash.values
.expand((e) => e)
.map((localAsset) => _storageRepository.getAssetEntityForAsset(localAsset).then((e) => e?.getMediaUrl())),
);
_log.info("Moving to trash ${mediaUrls.join(", ")} assets");
final result = await _localFilesManager.moveToTrash(mediaUrls.nonNulls.toList());
if (result) {
await _trashedLocalAssetRepository.trashLocalAsset(localAssetsToTrash);
}
} else {
_log.info("syncTrashedAssets, No assets found in backup-enabled albums for move to trash");
}
}
}

extension on Iterable<PlatformAlbum> {
@@ -290,20 +355,26 @@ extension on Iterable<PlatformAlbum> {

extension on Iterable<PlatformAsset> {
List<LocalAsset> toLocalAssets() {
return map(
(e) => LocalAsset(
id: e.id,
name: e.name,
checksum: null,
type: AssetType.values.elementAtOrNull(e.type) ?? AssetType.other,
createdAt: tryFromSecondsSinceEpoch(e.createdAt, isUtc: true) ?? DateTime.timestamp(),
updatedAt: tryFromSecondsSinceEpoch(e.updatedAt, isUtc: true) ?? DateTime.timestamp(),
width: e.width,
height: e.height,
durationInSeconds: e.durationInSeconds,
orientation: e.orientation,
isFavorite: e.isFavorite,
),
).toList();
return map((e) => e.toLocalAsset()).toList();
}

Iterable<TrashedAsset> toTrashedAssets(String albumId) {
return map((e) => (albumId: albumId, asset: e.toLocalAsset()));
}
}

extension on PlatformAsset {
LocalAsset toLocalAsset() => LocalAsset(
id: id,
name: name,
checksum: null,
type: AssetType.values.elementAtOrNull(type) ?? AssetType.other,
createdAt: tryFromSecondsSinceEpoch(createdAt, isUtc: true) ?? DateTime.timestamp(),
updatedAt: tryFromSecondsSinceEpoch(createdAt, isUtc: true) ?? DateTime.timestamp(),
width: width,
height: height,
durationInSeconds: durationInSeconds,
isFavorite: isFavorite,
orientation: orientation,
);
}

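A hedged wiring sketch, not taken from the commits: the expanded LocalSyncService constructor now needs the trashed-asset repository, files manager and storage repository, so a Riverpod-style provider (all provider names below are assumptions) would be built roughly like this in Dart:

// Illustrative only; assumes hooks_riverpod-style providers exist for each dependency.
final localSyncServiceProvider = Provider(
  (ref) => LocalSyncService(
    localAlbumRepository: ref.watch(localAlbumRepositoryProvider),
    trashedLocalAssetRepository: ref.watch(trashedLocalAssetRepositoryProvider),
    localFilesManager: ref.watch(localFilesManagerRepositoryProvider),
    storageRepository: ref.watch(storageRepositoryProvider),
    nativeSyncApi: ref.watch(nativeSyncApiProvider),
  ),
);
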
@@ -1,8 +1,15 @@
import 'dart:async';

import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_stream.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';

@@ -11,14 +18,26 @@ class SyncStreamService {

final SyncApiRepository _syncApiRepository;
final SyncStreamRepository _syncStreamRepository;
final DriftLocalAssetRepository _localAssetRepository;
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
final LocalFilesManagerRepository _localFilesManager;
final StorageRepository _storageRepository;
final bool Function()? _cancelChecker;

SyncStreamService({
required SyncApiRepository syncApiRepository,
required SyncStreamRepository syncStreamRepository,
required DriftLocalAssetRepository localAssetRepository,
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
required LocalFilesManagerRepository localFilesManager,
required StorageRepository storageRepository,
bool Function()? cancelChecker,
}) : _syncApiRepository = syncApiRepository,
_syncStreamRepository = syncStreamRepository,
_localAssetRepository = localAssetRepository,
_trashedLocalAssetRepository = trashedLocalAssetRepository,
_localFilesManager = localFilesManager,
_storageRepository = storageRepository,
_cancelChecker = cancelChecker;

bool get isCancelled => _cancelChecker?.call() ?? false;
@@ -83,7 +102,18 @@ class SyncStreamService {
case SyncEntityType.partnerDeleteV1:
return _syncStreamRepository.deletePartnerV1(data.cast());
case SyncEntityType.assetV1:
return _syncStreamRepository.updateAssetsV1(data.cast());
final remoteSyncAssets = data.cast<SyncAssetV1>();
await _syncStreamRepository.updateAssetsV1(remoteSyncAssets);
if (CurrentPlatform.isAndroid && Store.get(StoreKey.manageLocalMediaAndroid, false)) {
final hasPermission = await _localFilesManager.hasManageMediaPermission();
if (hasPermission) {
await _handleRemoteTrashed(remoteSyncAssets.where((e) => e.deletedAt != null).map((e) => e.checksum));
await _applyRemoteRestoreToLocal();
} else {
_logger.warning("sync Trashed Assets cannot proceed because MANAGE_MEDIA permission is missing");
}
}
return;
case SyncEntityType.assetDeleteV1:
return _syncStreamRepository.deleteAssetsV1(data.cast());
case SyncEntityType.assetExifV1:
@@ -132,7 +162,8 @@ class SyncStreamService {
return;
// SyncCompleteV1 is used to signal the completion of the sync process. Cleanup stale assets and signal completion
case SyncEntityType.syncCompleteV1:
return _syncStreamRepository.pruneAssets();
return;
// return _syncStreamRepository.pruneAssets();
// Request to reset the client state. Clear everything related to remote entities
case SyncEntityType.syncResetV1:
return _syncStreamRepository.reset();
@@ -211,4 +242,36 @@ class SyncStreamService {
_logger.severe("Error processing AssetUploadReadyV1 websocket batch events", error, stackTrace);
}
}

Future<void> _handleRemoteTrashed(Iterable<String> checksums) async {
if (checksums.isEmpty) {
return Future.value();
} else {
final localAssetsToTrash = await _localAssetRepository.getAssetsFromBackupAlbums(checksums);
if (localAssetsToTrash.isNotEmpty) {
final mediaUrls = await Future.wait(
localAssetsToTrash.values
.expand((e) => e)
.map((localAsset) => _storageRepository.getAssetEntityForAsset(localAsset).then((e) => e?.getMediaUrl())),
);
_logger.info("Moving to trash ${mediaUrls.join(", ")} assets");
final result = await _localFilesManager.moveToTrash(mediaUrls.nonNulls.toList());
if (result) {
await _trashedLocalAssetRepository.trashLocalAsset(localAssetsToTrash);
}
} else {
_logger.info("No assets found in backup-enabled albums for assets: $checksums");
}
}
}

Future<void> _applyRemoteRestoreToLocal() async {
final assetsToRestore = await _trashedLocalAssetRepository.getToRestore();
if (assetsToRestore.isNotEmpty) {
final restoredIds = await _localFilesManager.restoreAssetsFromTrash(assetsToRestore);
await _trashedLocalAssetRepository.applyRestoredAssets(restoredIds);
} else {
_logger.info("No remote assets found for restoration");
}
}
}

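A hedged illustration, not part of the commits: inside the SyncEntityType.assetV1 case, only entries carrying a deletedAt timestamp feed the local trash path. Variable names below repeat those in the hunk; everything else is an assumption (Dart):

// Sketch of the filter that drives _handleRemoteTrashed(); see the hunk above.
final remoteSyncAssets = data.cast<SyncAssetV1>();
final trashedChecksums = remoteSyncAssets
    .where((e) => e.deletedAt != null)
    .map((e) => e.checksum);
// _handleRemoteTrashed(trashedChecksums) looks the checksums up in backup-enabled
// albums and moves the matching local files to the platform trash, while
// _applyRemoteRestoreToLocal() re-inserts assets whose remote copy is no longer deleted.
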
@@ -0,0 +1,40 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/utils/asset.mixin.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';

@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_checksum ON trashed_local_asset_entity (checksum)')
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_album ON trashed_local_asset_entity (album_id)')
class TrashedLocalAssetEntity extends Table with DriftDefaultsMixin, AssetEntityMixin {
const TrashedLocalAssetEntity();

TextColumn get id => text()();

TextColumn get albumId => text()();

TextColumn get checksum => text().nullable()();

BoolColumn get isFavorite => boolean().withDefault(const Constant(false))();

IntColumn get orientation => integer().withDefault(const Constant(0))();

@override
Set<Column> get primaryKey => {id, albumId};
}

extension TrashedLocalAssetEntityDataDomainExtension on TrashedLocalAssetEntityData {
LocalAsset toLocalAsset() => LocalAsset(
id: id,
name: name,
checksum: checksum,
type: type,
createdAt: createdAt,
updatedAt: updatedAt,
durationInSeconds: durationInSeconds,
isFavorite: isFavorite,
height: height,
width: width,
orientation: orientation,
);
}
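A hedged query sketch, not part of the commits, showing how the new drift table can be read; `db` is an assumed Drift instance (Dart):

// Reads trashed rows for one album through the generated drift table and maps
// them to the domain model via the extension defined above.
Future<List<LocalAsset>> trashedAssetsForAlbum(Drift db, String albumId) {
  final query = db.trashedLocalAssetEntity.select()
    ..where((row) => row.albumId.equals(albumId));
  return query.map((row) => row.toLocalAsset()).get();
}
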
mobile/lib/infrastructure/entities/trashed_local_asset.entity.drift.dart (generated, new file, 1080 lines): file diff suppressed because it is too large.
@@ -10,6 +10,7 @@ import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory.entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.dart';
@@ -62,6 +63,7 @@ class IsarDatabaseRepository implements IDatabaseRepository {
PersonEntity,
AssetFaceEntity,
StoreEntity,
TrashedLocalAssetEntity,
],
include: {'package:immich_mobile/infrastructure/entities/merged_asset.drift'},
)
@@ -93,7 +95,7 @@ class Drift extends $Drift implements IDatabaseRepository {
}

@override
int get schemaVersion => 12;
int get schemaVersion => 13;

@override
MigrationStrategy get migration => MigrationStrategy(
@@ -178,6 +180,11 @@ class Drift extends $Drift implements IDatabaseRepository {
);
}
},
from12To13: (m, v13) async {
await m.create(v13.trashedLocalAssetEntity);
await m.createIndex(v13.idxTrashedLocalAssetChecksum);
await m.createIndex(v13.idxTrashedLocalAssetAlbum);
},
),
);
|
||||
|
||||
@@ -37,9 +37,11 @@ import 'package:immich_mobile/infrastructure/entities/asset_face.entity.drift.da
|
||||
as i17;
|
||||
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart'
|
||||
as i18;
|
||||
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart'
|
||||
as i19;
|
||||
import 'package:drift/internal/modular.dart' as i20;
|
||||
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
|
||||
as i20;
|
||||
import 'package:drift/internal/modular.dart' as i21;
|
||||
|
||||
abstract class $Drift extends i0.GeneratedDatabase {
|
||||
$Drift(i0.QueryExecutor e) : super(e);
|
||||
@@ -77,9 +79,11 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
late final i17.$AssetFaceEntityTable assetFaceEntity = i17
|
||||
.$AssetFaceEntityTable(this);
|
||||
late final i18.$StoreEntityTable storeEntity = i18.$StoreEntityTable(this);
|
||||
i19.MergedAssetDrift get mergedAssetDrift => i20.ReadDatabaseContainer(
|
||||
late final i19.$TrashedLocalAssetEntityTable trashedLocalAssetEntity = i19
|
||||
.$TrashedLocalAssetEntityTable(this);
|
||||
i20.MergedAssetDrift get mergedAssetDrift => i21.ReadDatabaseContainer(
|
||||
this,
|
||||
).accessor<i19.MergedAssetDrift>(i19.MergedAssetDrift.new);
|
||||
).accessor<i20.MergedAssetDrift>(i20.MergedAssetDrift.new);
|
||||
@override
|
||||
Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
|
||||
allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
|
||||
@@ -108,7 +112,10 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
personEntity,
|
||||
assetFaceEntity,
|
||||
storeEntity,
|
||||
trashedLocalAssetEntity,
|
||||
i11.idxLatLng,
|
||||
i19.idxTrashedLocalAssetChecksum,
|
||||
i19.idxTrashedLocalAssetAlbum,
|
||||
];
|
||||
@override
|
||||
i0.StreamQueryUpdateRules
|
||||
@@ -336,4 +343,9 @@ class $DriftManager {
|
||||
i17.$$AssetFaceEntityTableTableManager(_db, _db.assetFaceEntity);
|
||||
i18.$$StoreEntityTableTableManager get storeEntity =>
|
||||
i18.$$StoreEntityTableTableManager(_db, _db.storeEntity);
|
||||
i19.$$TrashedLocalAssetEntityTableTableManager get trashedLocalAssetEntity =>
|
||||
i19.$$TrashedLocalAssetEntityTableTableManager(
|
||||
_db,
|
||||
_db.trashedLocalAssetEntity,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -5037,6 +5037,454 @@ final class Schema12 extends i0.VersionedSchema {
|
||||
);
|
||||
}
|
||||
|
||||
final class Schema13 extends i0.VersionedSchema {
|
||||
Schema13({required super.database}) : super(version: 13);
|
||||
@override
|
||||
late final List<i1.DatabaseSchemaEntity> entities = [
|
||||
userEntity,
|
||||
remoteAssetEntity,
|
||||
stackEntity,
|
||||
localAssetEntity,
|
||||
remoteAlbumEntity,
|
||||
localAlbumEntity,
|
||||
localAlbumAssetEntity,
|
||||
idxLocalAssetChecksum,
|
||||
idxRemoteAssetOwnerChecksum,
|
||||
uQRemoteAssetsOwnerChecksum,
|
||||
uQRemoteAssetsOwnerLibraryChecksum,
|
||||
idxRemoteAssetChecksum,
|
||||
authUserEntity,
|
||||
userMetadataEntity,
|
||||
partnerEntity,
|
||||
remoteExifEntity,
|
||||
remoteAlbumAssetEntity,
|
||||
remoteAlbumUserEntity,
|
||||
memoryEntity,
|
||||
memoryAssetEntity,
|
||||
personEntity,
|
||||
assetFaceEntity,
|
||||
storeEntity,
|
||||
trashedLocalAssetEntity,
|
||||
idxLatLng,
|
||||
idxTrashedLocalAssetChecksum,
|
||||
idxTrashedLocalAssetAlbum,
|
||||
];
|
||||
late final Shape20 userEntity = Shape20(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'user_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_3,
|
||||
_column_84,
|
||||
_column_85,
|
||||
_column_91,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape17 remoteAssetEntity = Shape17(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_1,
|
||||
_column_8,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_10,
|
||||
_column_11,
|
||||
_column_12,
|
||||
_column_0,
|
||||
_column_13,
|
||||
_column_14,
|
||||
_column_15,
|
||||
_column_16,
|
||||
_column_17,
|
||||
_column_18,
|
||||
_column_19,
|
||||
_column_20,
|
||||
_column_21,
|
||||
_column_86,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape3 stackEntity = Shape3(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'stack_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [_column_0, _column_9, _column_5, _column_15, _column_75],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape2 localAssetEntity = Shape2(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'local_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_1,
|
||||
_column_8,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_10,
|
||||
_column_11,
|
||||
_column_12,
|
||||
_column_0,
|
||||
_column_22,
|
||||
_column_14,
|
||||
_column_23,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape9 remoteAlbumEntity = Shape9(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_album_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_56,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_15,
|
||||
_column_57,
|
||||
_column_58,
|
||||
_column_59,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape19 localAlbumEntity = Shape19(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'local_album_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_5,
|
||||
_column_31,
|
||||
_column_32,
|
||||
_column_90,
|
||||
_column_33,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape22 localAlbumAssetEntity = Shape22(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'local_album_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
|
||||
columns: [_column_34, _column_35, _column_33],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
final i1.Index idxLocalAssetChecksum = i1.Index(
|
||||
'idx_local_asset_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_local_asset_checksum ON local_asset_entity (checksum)',
|
||||
);
|
||||
final i1.Index idxRemoteAssetOwnerChecksum = i1.Index(
|
||||
'idx_remote_asset_owner_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
|
||||
);
|
||||
final i1.Index uQRemoteAssetsOwnerChecksum = i1.Index(
|
||||
'UQ_remote_assets_owner_checksum',
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_checksum ON remote_asset_entity (owner_id, checksum) WHERE(library_id IS NULL)',
|
||||
);
|
||||
final i1.Index uQRemoteAssetsOwnerLibraryChecksum = i1.Index(
|
||||
'UQ_remote_assets_owner_library_checksum',
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_library_checksum ON remote_asset_entity (owner_id, library_id, checksum) WHERE(library_id IS NOT NULL)',
|
||||
);
|
||||
final i1.Index idxRemoteAssetChecksum = i1.Index(
|
||||
'idx_remote_asset_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_remote_asset_checksum ON remote_asset_entity (checksum)',
|
||||
);
|
||||
late final Shape21 authUserEntity = Shape21(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'auth_user_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_3,
|
||||
_column_2,
|
||||
_column_84,
|
||||
_column_85,
|
||||
_column_92,
|
||||
_column_93,
|
||||
_column_7,
|
||||
_column_94,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape4 userMetadataEntity = Shape4(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'user_metadata_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(user_id, "key")'],
|
||||
columns: [_column_25, _column_26, _column_27],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape5 partnerEntity = Shape5(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'partner_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(shared_by_id, shared_with_id)'],
|
||||
columns: [_column_28, _column_29, _column_30],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape8 remoteExifEntity = Shape8(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_exif_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id)'],
|
||||
columns: [
|
||||
_column_36,
|
||||
_column_37,
|
||||
_column_38,
|
||||
_column_39,
|
||||
_column_40,
|
||||
_column_41,
|
||||
_column_11,
|
||||
_column_10,
|
||||
_column_42,
|
||||
_column_43,
|
||||
_column_44,
|
||||
_column_45,
|
||||
_column_46,
|
||||
_column_47,
|
||||
_column_48,
|
||||
_column_49,
|
||||
_column_50,
|
||||
_column_51,
|
||||
_column_52,
|
||||
_column_53,
|
||||
_column_54,
|
||||
_column_55,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape7 remoteAlbumAssetEntity = Shape7(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_album_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
|
||||
columns: [_column_36, _column_60],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape10 remoteAlbumUserEntity = Shape10(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_album_user_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(album_id, user_id)'],
|
||||
columns: [_column_60, _column_25, _column_61],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape11 memoryEntity = Shape11(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'memory_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_18,
|
||||
_column_15,
|
||||
_column_8,
|
||||
_column_62,
|
||||
_column_63,
|
||||
_column_64,
|
||||
_column_65,
|
||||
_column_66,
|
||||
_column_67,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape12 memoryAssetEntity = Shape12(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'memory_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id, memory_id)'],
|
||||
columns: [_column_36, _column_68],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape14 personEntity = Shape14(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'person_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_15,
|
||||
_column_1,
|
||||
_column_69,
|
||||
_column_71,
|
||||
_column_72,
|
||||
_column_73,
|
||||
_column_74,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape15 assetFaceEntity = Shape15(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'asset_face_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_36,
|
||||
_column_76,
|
||||
_column_77,
|
||||
_column_78,
|
||||
_column_79,
|
||||
_column_80,
|
||||
_column_81,
|
||||
_column_82,
|
||||
_column_83,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape18 storeEntity = Shape18(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'store_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [_column_87, _column_88, _column_89],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape23 trashedLocalAssetEntity = Shape23(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'trashed_local_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id, album_id)'],
|
||||
columns: [
|
||||
_column_1,
|
||||
_column_8,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_10,
|
||||
_column_11,
|
||||
_column_12,
|
||||
_column_0,
|
||||
_column_95,
|
||||
_column_22,
|
||||
_column_14,
|
||||
_column_23,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
final i1.Index idxLatLng = i1.Index(
|
||||
'idx_lat_lng',
|
||||
'CREATE INDEX IF NOT EXISTS idx_lat_lng ON remote_exif_entity (latitude, longitude)',
|
||||
);
|
||||
final i1.Index idxTrashedLocalAssetChecksum = i1.Index(
|
||||
'idx_trashed_local_asset_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_checksum ON trashed_local_asset_entity (checksum)',
|
||||
);
|
||||
final i1.Index idxTrashedLocalAssetAlbum = i1.Index(
|
||||
'idx_trashed_local_asset_album',
|
||||
'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_album ON trashed_local_asset_entity (album_id)',
|
||||
);
|
||||
}
|
||||
|
||||
class Shape23 extends i0.VersionedTable {
|
||||
Shape23({required super.source, required super.alias}) : super.aliased();
|
||||
i1.GeneratedColumn<String> get name =>
|
||||
columnsByName['name']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<int> get type =>
|
||||
columnsByName['type']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<DateTime> get createdAt =>
|
||||
columnsByName['created_at']! as i1.GeneratedColumn<DateTime>;
|
||||
i1.GeneratedColumn<DateTime> get updatedAt =>
|
||||
columnsByName['updated_at']! as i1.GeneratedColumn<DateTime>;
|
||||
i1.GeneratedColumn<int> get width =>
|
||||
columnsByName['width']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get height =>
|
||||
columnsByName['height']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get durationInSeconds =>
|
||||
columnsByName['duration_in_seconds']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<String> get id =>
|
||||
columnsByName['id']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<String> get albumId =>
|
||||
columnsByName['album_id']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<String> get checksum =>
|
||||
columnsByName['checksum']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<bool> get isFavorite =>
|
||||
columnsByName['is_favorite']! as i1.GeneratedColumn<bool>;
|
||||
i1.GeneratedColumn<int> get orientation =>
|
||||
columnsByName['orientation']! as i1.GeneratedColumn<int>;
|
||||
}
|
||||
|
||||
i1.GeneratedColumn<String> _column_95(String aliasedName) =>
|
||||
i1.GeneratedColumn<String>(
|
||||
'album_id',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.string,
|
||||
);
|
||||
i0.MigrationStepWithVersion migrationSteps({
|
||||
required Future<void> Function(i1.Migrator m, Schema2 schema) from1To2,
|
||||
required Future<void> Function(i1.Migrator m, Schema3 schema) from2To3,
|
||||
@@ -5049,6 +5497,7 @@ i0.MigrationStepWithVersion migrationSteps({
|
||||
required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10,
|
||||
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
|
||||
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
|
||||
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
|
||||
}) {
|
||||
return (currentVersion, database) async {
|
||||
switch (currentVersion) {
|
||||
@@ -5107,6 +5556,11 @@ i0.MigrationStepWithVersion migrationSteps({
|
||||
final migrator = i1.Migrator(database, schema);
|
||||
await from11To12(migrator, schema);
|
||||
return 12;
|
||||
case 12:
|
||||
final schema = Schema13(database: database);
|
||||
final migrator = i1.Migrator(database, schema);
|
||||
await from12To13(migrator, schema);
|
||||
return 13;
|
||||
default:
|
||||
throw ArgumentError.value('Unknown migration from $currentVersion');
|
||||
}
|
||||
@@ -5125,6 +5579,7 @@ i1.OnUpgrade stepByStep({
|
||||
required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10,
|
||||
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
|
||||
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
|
||||
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
|
||||
}) => i0.VersionedSchema.stepByStepHelper(
|
||||
step: migrationSteps(
|
||||
from1To2: from1To2,
|
||||
@@ -5138,5 +5593,6 @@ i1.OnUpgrade stepByStep({
|
||||
from9To10: from9To10,
|
||||
from10To11: from10To11,
|
||||
from11To12: from11To12,
|
||||
from12To13: from12To13,
|
||||
),
|
||||
);
|
||||
|
||||
@@ -1,4 +1,6 @@
import 'package:collection/collection.dart';
import 'package:drift/drift.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
@@ -8,6 +10,7 @@ import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';

class DriftLocalAssetRepository extends DriftDatabaseRepository {
final Drift _db;

const DriftLocalAssetRepository(this._db) : super(_db);

SingleOrNullSelectable<LocalAsset?> _assetSelectable(String id) {
@@ -95,4 +98,32 @@ class DriftLocalAssetRepository extends DriftDatabaseRepository {
}
return query.map((localAlbum) => localAlbum.toDto()).get();
}

Future<Map<String, List<LocalAsset>>> getAssetsFromBackupAlbums(Iterable<String> checksums) async {
if (checksums.isEmpty) {
return {};
}

final result = <String, List<LocalAsset>>{};

for (final slice in checksums.toSet().slices(kDriftMaxChunk)) {
final rows =
await (_db.select(_db.localAlbumAssetEntity).join([
innerJoin(_db.localAlbumEntity, _db.localAlbumAssetEntity.albumId.equalsExp(_db.localAlbumEntity.id)),
innerJoin(_db.localAssetEntity, _db.localAlbumAssetEntity.assetId.equalsExp(_db.localAssetEntity.id)),
])..where(
_db.localAlbumEntity.backupSelection.equalsValue(BackupSelection.selected) &
_db.localAssetEntity.checksum.isIn(slice),
))
.get();

for (final row in rows) {
final albumId = row.readTable(_db.localAlbumAssetEntity).albumId;
final assetData = row.readTable(_db.localAssetEntity);
final asset = assetData.toDto();
(result[albumId] ??= <LocalAsset>[]).add(asset);
}
}
return result;
}
}

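A hedged usage sketch, not part of the commits: resolving which local files correspond to remotely trashed checksums. `repo` and the checksum literal below are assumptions (Dart):

Future<void> example(DriftLocalAssetRepository repo) async {
  // Each key is a backup-enabled album id, each value the matching assets in that album.
  final byAlbum = await repo.getAssetsFromBackupAlbums(['<checksum-from-sync-stream>']);
  for (final entry in byAlbum.entries) {
    print('${entry.key}: ${entry.value.map((a) => a.id).toList()}');
  }
}
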
@@ -12,6 +12,7 @@ import 'package:maplibre_gl/maplibre_gl.dart';

class RemoteAssetRepository extends DriftDatabaseRepository {
final Drift _db;

const RemoteAssetRepository(this._db) : super(_db);

/// For testing purposes

@@ -0,0 +1,252 @@
import 'package:collection/collection.dart';
import 'package:drift/drift.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';

typedef TrashedAsset = ({String albumId, LocalAsset asset});

class DriftTrashedLocalAssetRepository extends DriftDatabaseRepository {
final Drift _db;

const DriftTrashedLocalAssetRepository(this._db) : super(_db);

Future<void> updateHashes(Map<String, String> hashes) {
if (hashes.isEmpty) {
return Future.value();
}
return _db.batch((batch) async {
for (final entry in hashes.entries) {
batch.update(
_db.trashedLocalAssetEntity,
TrashedLocalAssetEntityCompanion(checksum: Value(entry.value)),
where: (e) => e.id.equals(entry.key),
);
}
});
}

Future<List<LocalAsset>> getAssetsToHash(Iterable<String> albumIds) {
final query = _db.trashedLocalAssetEntity.select()..where((r) => r.albumId.isIn(albumIds) & r.checksum.isNull());
return query.map((row) => row.toLocalAsset()).get();
}

Future<Iterable<LocalAsset>> getToRestore() async {
final selectedAlbumIds = (_db.selectOnly(_db.localAlbumEntity)
..addColumns([_db.localAlbumEntity.id])
..where(_db.localAlbumEntity.backupSelection.equalsValue(BackupSelection.selected)));

final rows =
await (_db.select(_db.trashedLocalAssetEntity).join([
innerJoin(
_db.remoteAssetEntity,
_db.remoteAssetEntity.checksum.equalsExp(_db.trashedLocalAssetEntity.checksum),
),
])..where(
_db.trashedLocalAssetEntity.albumId.isInQuery(selectedAlbumIds) &
_db.remoteAssetEntity.deletedAt.isNull(),
))
.get();

return rows.map((result) => result.readTable(_db.trashedLocalAssetEntity).toLocalAsset());
}

/// Applies resulted snapshot of trashed assets:
/// - upserts incoming rows
/// - deletes rows that are not present in the snapshot
Future<void> processTrashSnapshot(Iterable<TrashedAsset> trashedAssets) async {
if (trashedAssets.isEmpty) {
await _db.delete(_db.trashedLocalAssetEntity).go();
return;
}
final assetIds = trashedAssets.map((e) => e.asset.id).toSet();
Map<String, String> localChecksumById = await _getCachedChecksums(assetIds);

return _db.transaction(() async {
await _db.batch((batch) {
for (final item in trashedAssets) {
final effectiveChecksum = localChecksumById[item.asset.id] ?? item.asset.checksum;
final companion = TrashedLocalAssetEntityCompanion.insert(
id: item.asset.id,
albumId: item.albumId,
checksum: Value(effectiveChecksum),
name: item.asset.name,
type: item.asset.type,
createdAt: Value(item.asset.createdAt),
updatedAt: Value(item.asset.updatedAt),
width: Value(item.asset.width),
height: Value(item.asset.height),
durationInSeconds: Value(item.asset.durationInSeconds),
isFavorite: Value(item.asset.isFavorite),
orientation: Value(item.asset.orientation),
);

batch.insert<$TrashedLocalAssetEntityTable, TrashedLocalAssetEntityData>(
_db.trashedLocalAssetEntity,
companion,
onConflict: DoUpdate((_) => companion, where: (old) => old.updatedAt.isNotValue(item.asset.updatedAt)),
);
}
});

if (assetIds.length <= kDriftMaxChunk) {
await (_db.delete(_db.trashedLocalAssetEntity)..where((row) => row.id.isNotIn(assetIds))).go();
} else {
final existingIds = await (_db.selectOnly(
_db.trashedLocalAssetEntity,
)..addColumns([_db.trashedLocalAssetEntity.id])).map((r) => r.read(_db.trashedLocalAssetEntity.id)!).get();
final idToDelete = existingIds.where((id) => !assetIds.contains(id));
for (final slice in idToDelete.slices(kDriftMaxChunk)) {
await (_db.delete(_db.trashedLocalAssetEntity)..where((t) => t.id.isIn(slice))).go();
}
}
});
}

Stream<int> watchCount() {
return (_db.selectOnly(_db.trashedLocalAssetEntity)..addColumns([_db.trashedLocalAssetEntity.id.count()]))
.watchSingle()
.map((row) => row.read<int>(_db.trashedLocalAssetEntity.id.count()) ?? 0);
}

Stream<int> watchHashedCount() {
return (_db.selectOnly(_db.trashedLocalAssetEntity)
..addColumns([_db.trashedLocalAssetEntity.id.count()])
..where(_db.trashedLocalAssetEntity.checksum.isNotNull()))
.watchSingle()
.map((row) => row.read<int>(_db.trashedLocalAssetEntity.id.count()) ?? 0);
}

Future<void> trashLocalAsset(Map<String, List<LocalAsset>> assetsByAlbums) async {
if (assetsByAlbums.isEmpty) {
return;
}

final companions = <TrashedLocalAssetEntityCompanion>[];
final idToDelete = <String>{};

for (final entry in assetsByAlbums.entries) {
for (final asset in entry.value) {
idToDelete.add(asset.id);
companions.add(
TrashedLocalAssetEntityCompanion(
id: Value(asset.id),
name: Value(asset.name),
albumId: Value(entry.key),
checksum: Value(asset.checksum),
type: Value(asset.type),
width: Value(asset.width),
height: Value(asset.height),
durationInSeconds: Value(asset.durationInSeconds),
isFavorite: Value(asset.isFavorite),
orientation: Value(asset.orientation),
createdAt: Value(asset.createdAt),
updatedAt: Value(asset.updatedAt),
),
);
}
}

await _db.transaction(() async {
for (final companion in companions) {
await _db.into(_db.trashedLocalAssetEntity).insertOnConflictUpdate(companion);
}

for (final slice in idToDelete.slices(kDriftMaxChunk)) {
await (_db.delete(_db.localAssetEntity)..where((t) => t.id.isIn(slice))).go();
}
});
}

Future<void> applyRestoredAssets(List<String> idList) async {
if (idList.isEmpty) {
return;
}

final trashedAssets = <TrashedLocalAssetEntityData>[];

for (final slice in idList.slices(kDriftMaxChunk)) {
final q = _db.select(_db.trashedLocalAssetEntity)..where((t) => t.id.isIn(slice));
trashedAssets.addAll(await q.get());
}

if (trashedAssets.isEmpty) {
return;
}

final companions = trashedAssets.map((e) {
return LocalAssetEntityCompanion.insert(
id: e.id,
name: e.name,
type: e.type,
createdAt: Value(e.createdAt),
updatedAt: Value(e.updatedAt),
width: Value(e.width),
height: Value(e.height),
durationInSeconds: Value(e.durationInSeconds),
checksum: Value(e.checksum),
isFavorite: Value(e.isFavorite),
orientation: Value(e.orientation),
);
});

await _db.transaction(() async {
for (final companion in companions) {
await _db.into(_db.localAssetEntity).insertOnConflictUpdate(companion);
}
for (final slice in idList.slices(kDriftMaxChunk)) {
await (_db.delete(_db.trashedLocalAssetEntity)..where((t) => t.id.isIn(slice))).go();
}
});
}

Future<Map<String, List<LocalAsset>>> getToTrash() async {
final result = <String, List<LocalAsset>>{};

final rows =
await (_db.select(_db.localAlbumAssetEntity).join([
innerJoin(_db.localAlbumEntity, _db.localAlbumAssetEntity.albumId.equalsExp(_db.localAlbumEntity.id)),
innerJoin(_db.localAssetEntity, _db.localAlbumAssetEntity.assetId.equalsExp(_db.localAssetEntity.id)),
leftOuterJoin(
_db.remoteAssetEntity,
_db.remoteAssetEntity.checksum.equalsExp(_db.localAssetEntity.checksum),
),
])..where(
_db.localAlbumEntity.backupSelection.equalsValue(BackupSelection.selected) &
_db.remoteAssetEntity.deletedAt.isNotNull(),
))
.get();

for (final row in rows) {
final albumId = row.readTable(_db.localAlbumAssetEntity).albumId;
final asset = row.readTable(_db.localAssetEntity).toDto();
(result[albumId] ??= <LocalAsset>[]).add(asset);
}

return result;
}

//attempt to reuse existing checksums
Future<Map<String, String>> _getCachedChecksums(Set<String> assetIds) async {
final localChecksumById = <String, String>{};

for (final slice in assetIds.slices(kDriftMaxChunk)) {
final rows =
await (_db.selectOnly(_db.localAssetEntity)
..where(_db.localAssetEntity.id.isIn(slice) & _db.localAssetEntity.checksum.isNotNull())
..addColumns([_db.localAssetEntity.id, _db.localAssetEntity.checksum]))
.get();

for (final r in rows) {
localChecksumById[r.read(_db.localAssetEntity.id)!] = r.read(_db.localAssetEntity.checksum)!;
}
}

return localChecksumById;
}
}
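A hedged flow sketch, not part of the commits, showing how the repository methods above are meant to compose during a sync pass; `repo` and `filesManager` are assumed instances (Dart):

Future<void> reconcileTrash(DriftTrashedLocalAssetRepository repo, LocalFilesManagerRepository filesManager) async {
  // 1. Persist the platform trash snapshot first (upsert + prune, see processTrashSnapshot above).
  // 2. Restore anything that is no longer trashed on the server.
  final toRestore = await repo.getToRestore();
  if (toRestore.isNotEmpty) {
    final restoredIds = await filesManager.restoreAssetsFromTrash(toRestore);
    await repo.applyRestoredAssets(restoredIds);
  }
  // 3. Trash local copies of assets the server has deleted: resolve media URLs,
  //    call filesManager.moveToTrash, then record the move with repo.trashLocalAsset.
  final toTrash = await repo.getToTrash();
  if (toTrash.isNotEmpty) {
    // ...see LocalSyncService.processTrashedAssets above for the full handling.
  }
}
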
@@ -1,4 +1,5 @@
import 'dart:async';
import 'dart:io';

import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
@@ -65,7 +66,7 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
if (Store.isBetaTimelineEnabled) {
bool syncSuccess = false;
await Future.wait([
backgroundManager.syncLocal(),
backgroundManager.syncLocal(full: Platform.isAndroid ? true : false),
backgroundManager.syncRemote().then((success) => syncSuccess = success),
]);

mobile/lib/platform/native_sync_api.g.dart (generated, 28 lines changed)
@@ -562,4 +562,32 @@ class NativeSyncApi {
return;
}
}

Future<Map<String, List<PlatformAsset>>> getTrashedAssets() async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets$pigeonVar_messageChannelSuffix';
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
pigeonVar_channelName,
pigeonChannelCodec,
binaryMessenger: pigeonVar_binaryMessenger,
);
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(null);
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
if (pigeonVar_replyList == null) {
throw _createConnectionError(pigeonVar_channelName);
} else if (pigeonVar_replyList.length > 1) {
throw PlatformException(
code: pigeonVar_replyList[0]! as String,
message: pigeonVar_replyList[1] as String?,
details: pigeonVar_replyList[2],
);
} else if (pigeonVar_replyList[0] == null) {
throw PlatformException(
code: 'null-error',
message: 'Host platform returned null value for non-null return value.',
);
} else {
return (pigeonVar_replyList[0] as Map<Object?, Object?>?)!.cast<String, List<PlatformAsset>>();
}
}
}

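A hedged usage sketch, not part of the generated code above: fetching the platform trash snapshot and inspecting it per album; `api` is an assumed NativeSyncApi instance (Dart):

Future<void> example(NativeSyncApi api) async {
  // Keys are platform album ids, values the trashed assets reported by the host platform.
  final Map<String, List<PlatformAsset>> trashed = await api.getTrashedAssets();
  trashed.forEach((albumId, assets) {
    print('album $albumId has ${assets.length} trashed assets');
  });
}
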
@@ -3,21 +3,13 @@ import 'package:flutter/material.dart';
|
||||
import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/domain/models/album/album.model.dart';
|
||||
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
|
||||
import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
|
||||
import 'package:immich_mobile/extensions/build_context_extensions.dart';
|
||||
import 'package:immich_mobile/extensions/datetime_extensions.dart';
|
||||
import 'package:immich_mobile/models/activities/activity.model.dart';
|
||||
import 'package:immich_mobile/widgets/activities/comment_bubble.dart';
|
||||
import 'package:immich_mobile/presentation/widgets/action_buttons/like_activity_action_button.widget.dart';
|
||||
import 'package:immich_mobile/presentation/widgets/album/drift_activity_text_field.dart';
|
||||
import 'package:immich_mobile/providers/activity.provider.dart';
|
||||
import 'package:immich_mobile/providers/activity_service.provider.dart';
|
||||
import 'package:immich_mobile/providers/image/immich_remote_thumbnail_provider.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/asset_viewer/current_asset.provider.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
|
||||
import 'package:immich_mobile/providers/user.provider.dart';
|
||||
import 'package:immich_mobile/widgets/activities/dismissible_activity.dart';
|
||||
import 'package:immich_mobile/widgets/common/user_circle_avatar.dart';
|
||||
|
||||
@RoutePage()
|
||||
class DriftActivitiesPage extends HookConsumerWidget {
|
||||
@@ -27,10 +19,8 @@ class DriftActivitiesPage extends HookConsumerWidget {
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context, WidgetRef ref) {
|
||||
final asset = ref.read(currentAssetNotifier) as RemoteAsset?;
|
||||
|
||||
final activityNotifier = ref.read(albumActivityProvider(album.id, asset?.id).notifier);
|
||||
final activities = ref.watch(albumActivityProvider(album.id, asset?.id));
|
||||
final activityNotifier = ref.read(albumActivityProvider(album.id).notifier);
|
||||
final activities = ref.watch(albumActivityProvider(album.id));
|
||||
final listViewScrollController = useScrollController();
|
||||
|
||||
void scrollToBottom() {
|
||||
@@ -46,7 +36,7 @@ class DriftActivitiesPage extends HookConsumerWidget {
|
||||
overrides: [currentRemoteAlbumScopedProvider.overrideWithValue(album)],
|
||||
child: Scaffold(
|
||||
appBar: AppBar(
|
||||
title: asset == null ? Text(album.name) : null,
|
||||
title: Text(album.name),
|
||||
actions: [const LikeActivityActionButton(menuItem: true)],
|
||||
actionsPadding: const EdgeInsets.only(right: 8),
|
||||
),
|
||||
@@ -57,7 +47,7 @@ class DriftActivitiesPage extends HookConsumerWidget {
|
||||
activityWidgets.add(
|
||||
Padding(
|
||||
padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 6),
|
||||
child: _CommentBubble(activity: activity),
|
||||
child: CommentBubble(activity: activity),
|
||||
),
|
||||
);
|
||||
}
|
||||
@@ -91,139 +81,3 @@ class DriftActivitiesPage extends HookConsumerWidget {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class _CommentBubble extends ConsumerWidget {
|
||||
final Activity activity;
|
||||
|
||||
const _CommentBubble({required this.activity});
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context, WidgetRef ref) {
|
||||
final user = ref.watch(currentUserProvider);
|
||||
final album = ref.watch(currentRemoteAlbumProvider)!;
|
||||
final isOwn = activity.user.id == user?.id;
|
||||
final canDelete = isOwn || album.ownerId == user?.id;
|
||||
final hasAsset = activity.assetId != null && activity.assetId!.isNotEmpty;
|
||||
final isLike = activity.type == ActivityType.like;
|
||||
final bgColor = isOwn ? context.colorScheme.primaryContainer : context.colorScheme.surfaceContainer;
|
||||
|
||||
final activityNotifier = ref.read(albumActivityProvider(album.id, activity.assetId).notifier);
|
||||
|
||||
Future<void> openAssetViewer() async {
|
||||
final activityService = ref.read(activityServiceProvider);
|
||||
final route = await activityService.buildAssetViewerRoute(activity.assetId!, ref);
|
||||
if (route != null) await context.pushRoute(route);
|
||||
}
|
||||
|
||||
Widget avatar() {
|
||||
if (isOwn) {
|
||||
return const SizedBox.shrink();
|
||||
}
|
||||
|
||||
return UserCircleAvatar(user: activity.user, size: 28, radius: 14);
|
||||
}
|
||||
|
||||
Widget? thumbnail() {
|
||||
if (!hasAsset) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return ConstrainedBox(
|
||||
constraints: const BoxConstraints(maxWidth: 150, maxHeight: 150),
|
||||
child: Stack(
|
||||
children: [
|
||||
GestureDetector(
|
||||
onTap: openAssetViewer,
|
||||
child: ClipRRect(
|
||||
borderRadius: const BorderRadius.all(Radius.circular(10)),
|
||||
child: Image(
|
||||
image: ImmichRemoteThumbnailProvider(assetId: activity.assetId!),
|
||||
fit: BoxFit.cover,
|
||||
),
|
||||
),
|
||||
),
|
||||
if (isLike)
|
||||
Positioned(
|
||||
right: 6,
|
||||
bottom: 6,
|
||||
child: Container(
|
||||
padding: const EdgeInsets.all(4),
|
||||
decoration: BoxDecoration(color: Colors.white.withValues(alpha: 0.7), shape: BoxShape.circle),
|
||||
child: Icon(Icons.favorite, color: Colors.red[600], size: 18),
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// Likes Album widget (for likes without asset)
|
||||
Widget? likesToAlbum() {
|
||||
if (!isLike || hasAsset) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return Container(
|
||||
padding: const EdgeInsets.all(8),
|
||||
decoration: BoxDecoration(color: Colors.white.withValues(alpha: 0.7), shape: BoxShape.circle),
|
||||
child: Icon(Icons.favorite, color: Colors.red[600], size: 18),
|
||||
);
|
||||
}
|
||||
|
||||
Widget? commentBubble() {
|
||||
if (activity.comment == null || activity.comment!.isEmpty) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return ConstrainedBox(
|
||||
constraints: BoxConstraints(maxWidth: MediaQuery.of(context).size.width * 0.5),
|
||||
child: Container(
|
||||
padding: const EdgeInsets.all(10),
|
||||
decoration: BoxDecoration(color: bgColor, borderRadius: const BorderRadius.all(Radius.circular(12))),
|
||||
child: Text(
|
||||
activity.comment ?? '',
|
||||
style: context.textTheme.bodyLarge?.copyWith(color: context.colorScheme.onSurface),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// Combined content widgets
|
||||
final List<Widget> contentChildren = [thumbnail(), likesToAlbum(), commentBubble()].whereType<Widget>().toList();
|
||||
|
||||
return DismissibleActivity(
|
||||
onDismiss: canDelete ? (id) async => await activityNotifier.removeActivity(id) : null,
|
||||
activity.id,
|
||||
Align(
|
||||
alignment: isOwn ? Alignment.centerRight : Alignment.centerLeft,
|
||||
child: ConstrainedBox(
|
||||
constraints: BoxConstraints(maxWidth: MediaQuery.of(context).size.width * 0.86),
|
||||
child: Container(
|
||||
margin: const EdgeInsets.symmetric(vertical: 6, horizontal: 10),
|
||||
child: Row(
|
||||
mainAxisSize: MainAxisSize.min,
|
||||
crossAxisAlignment: CrossAxisAlignment.start,
|
||||
children: [
|
||||
if (!isOwn) ...[avatar(), const SizedBox(width: 8)],
|
||||
// Content column
|
||||
Column(
|
||||
crossAxisAlignment: isOwn ? CrossAxisAlignment.end : CrossAxisAlignment.start,
|
||||
children: [
|
||||
...contentChildren.map((w) => Padding(padding: const EdgeInsets.only(bottom: 8.0), child: w)),
|
||||
Text(
|
||||
'${activity.user.name} • ${activity.createdAt.timeAgo()}',
|
||||
style: context.textTheme.labelMedium?.copyWith(
|
||||
color: context.colorScheme.onSurface.withValues(alpha: 0.6),
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
if (isOwn) const SizedBox(width: 8),
|
||||
],
|
||||
),
|
||||
),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ import 'package:immich_mobile/extensions/translate_extensions.dart';
|
||||
import 'package:immich_mobile/models/search/search_filter.model.dart';
|
||||
import 'package:immich_mobile/presentation/pages/search/paginated_search.provider.dart';
|
||||
import 'package:immich_mobile/presentation/widgets/bottom_sheet/general_bottom_sheet.widget.dart';
|
||||
import 'package:immich_mobile/presentation/widgets/search/quick_date_picker.dart';
|
||||
import 'package:immich_mobile/presentation/widgets/timeline/timeline.widget.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
|
||||
import 'package:immich_mobile/providers/search/search_input_focus.provider.dart';
|
||||
@@ -54,6 +55,7 @@ class DriftSearchPage extends HookConsumerWidget {
|
||||
);
|
||||
|
||||
final previousFilter = useState<SearchFilter?>(null);
|
||||
final dateInputFilter = useState<DateFilterInputModel?>(null);
|
||||
|
||||
final peopleCurrentFilterWidget = useState<Widget?>(null);
|
||||
final dateRangeCurrentFilterWidget = useState<Widget?>(null);
|
||||
@@ -245,19 +247,54 @@ class DriftSearchPage extends HookConsumerWidget {
);
}

datePicked(DateFilterInputModel? selectedDate) {
dateInputFilter.value = selectedDate;
if (selectedDate == null) {
filter.value = filter.value.copyWith(date: SearchDateFilter());

dateRangeCurrentFilterWidget.value = null;
unawaited(search());
return;
}

final date = selectedDate.asDateTimeRange();

filter.value = filter.value.copyWith(
date: SearchDateFilter(
takenAfter: date.start,
takenBefore: date.end.add(const Duration(hours: 23, minutes: 59, seconds: 59)),
),
);

dateRangeCurrentFilterWidget.value = Text(
selectedDate.asHumanReadable(context),
style: context.textTheme.labelLarge,
);

unawaited(search());
}

showDatePicker() async {
final firstDate = DateTime(1900);
final lastDate = DateTime.now();

var dateRange = DateTimeRange(
start: filter.value.date.takenAfter ?? lastDate,
end: filter.value.date.takenBefore ?? lastDate,
);

// datePicked() may increase the date, this will make the date picker fail an assertion
// Fixup the end date to be at most now.
if (dateRange.end.isAfter(lastDate)) {
dateRange = DateTimeRange(start: dateRange.start, end: lastDate);
}

final date = await showDateRangePicker(
context: context,
firstDate: firstDate,
lastDate: lastDate,
currentDate: DateTime.now(),
initialDateRange: DateTimeRange(
start: filter.value.date.takenAfter ?? lastDate,
end: filter.value.date.takenBefore ?? lastDate,
),
initialDateRange: dateRange,
helpText: 'search_filter_date_title'.t(context: context),
cancelText: 'cancel'.t(context: context),
confirmText: 'select'.t(context: context),
@@ -271,40 +308,32 @@ class DriftSearchPage extends HookConsumerWidget {
);

if (date == null) {
filter.value = filter.value.copyWith(date: SearchDateFilter());

dateRangeCurrentFilterWidget.value = null;
unawaited(search());
return;
}

filter.value = filter.value.copyWith(
date: SearchDateFilter(
takenAfter: date.start,
takenBefore: date.end.add(const Duration(hours: 23, minutes: 59, seconds: 59)),
),
);

// If date range is less than 24 hours, set the end date to the end of the day
if (date.end.difference(date.start).inHours < 24) {
dateRangeCurrentFilterWidget.value = Text(
DateFormat.yMMMd().format(date.start.toLocal()),
style: context.textTheme.labelLarge,
);
datePicked(null);
} else {
dateRangeCurrentFilterWidget.value = Text(
'search_filter_date_interval'.t(
context: context,
args: {
"start": DateFormat.yMMMd().format(date.start.toLocal()),
"end": DateFormat.yMMMd().format(date.end.toLocal()),
datePicked(CustomDateFilter.fromRange(date));
}
}

showQuickDatePicker() {
showFilterBottomSheet(
context: context,
child: FilterBottomSheetScaffold(
title: "pick_date_range".tr(),
expanded: true,
onClear: () => datePicked(null),
child: QuickDatePicker(
currentInput: dateInputFilter.value,
onRequestPicker: () {
context.pop();
showDatePicker();
},
onSelect: (date) {
context.pop();
datePicked(date);
},
),
style: context.textTheme.labelLarge,
);
}

unawaited(search());
),
);
}
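Editor's note (not part of the diff): showDateRangePicker returns both endpoints at local midnight, so the takenBefore bound above is pushed to the last second of the picked end day to keep that day's photos in the results. A minimal standalone sketch of the same adjustment, with an illustrative helper name:

DateTimeRange<DateTime> toInclusiveDayRange(DateTimeRange<DateTime> picked) {
  // Extend the end from 00:00:00 to 23:59:59 of the same day, mirroring the takenBefore adjustment above.
  return DateTimeRange<DateTime>(
    start: picked.start,
    end: picked.end.add(const Duration(hours: 23, minutes: 59, seconds: 59)),
  );
}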
// MEDIA PICKER
@@ -589,7 +618,7 @@ class DriftSearchPage extends HookConsumerWidget {
),
SearchFilterChip(
icon: Icons.date_range_outlined,
onTap: showDatePicker,
onTap: showQuickDatePicker,
label: 'search_filter_date'.t(context: context),
currentFilter: dateRangeCurrentFilterWidget.value,
),
@@ -3,14 +3,12 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/widgets/activities/comment_bubble.dart';
import 'package:immich_mobile/presentation/widgets/album/drift_activity_text_field.dart';
import 'package:immich_mobile/presentation/widgets/bottom_sheet/base_bottom_sheet.widget.dart';
import 'package:immich_mobile/providers/activity.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset_viewer/current_asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/widgets/activities/activity_tile.dart';
import 'package:immich_mobile/widgets/activities/dismissible_activity.dart';

class ActivitiesBottomSheet extends HookConsumerWidget {
final DraggableScrollableController controller;
@@ -28,7 +26,6 @@ class ActivitiesBottomSheet extends HookConsumerWidget {
Widget build(BuildContext context, WidgetRef ref) {
final album = ref.watch(currentRemoteAlbumProvider)!;
final asset = ref.watch(currentAssetNotifier) as RemoteAsset?;
final user = ref.watch(currentUserProvider);

final activityNotifier = ref.read(albumActivityProvider(album.id, asset?.id).notifier);
final activities = ref.watch(albumActivityProvider(album.id, asset?.id));
@@ -47,16 +44,9 @@ class ActivitiesBottomSheet extends HookConsumerWidget {
return const SizedBox.shrink();
}
final activity = data[data.length - 1 - index];
final canDelete = activity.user.id == user?.id || album.ownerId == user?.id;
return Padding(
padding: const EdgeInsets.symmetric(vertical: 1),
child: DismissibleActivity(
activity.id,
ActivityTile(activity, isBottomSheet: true),
onDismiss: canDelete
? (activityId) async => await activityNotifier.removeActivity(activity.id)
: null,
),
padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 4),
child: CommentBubble(activity: activity, isAssetActivity: true),
);
}, childCount: data.length + 1),
);
@@ -627,10 +627,10 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
// Rebuild the widget when the asset viewer state changes
// Using multiple selectors to avoid unnecessary rebuilds for other state changes
ref.watch(assetViewerProvider.select((s) => s.showingBottomSheet));
ref.watch(assetViewerProvider.select((s) => s.showingControls));
ref.watch(assetViewerProvider.select((s) => s.backgroundOpacity));
ref.watch(assetViewerProvider.select((s) => s.stackIndex));
ref.watch(isPlayingMotionVideoProvider);
final showingControls = ref.watch(assetViewerProvider.select((s) => s.showingControls));

// Listen for casting changes and send initial asset to the cast provider
ref.listen(castProvider.select((value) => value.isCasting), (_, isCasting) async {
@@ -663,7 +663,14 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
appBar: const ViewerTopAppBar(),
extendBody: true,
extendBodyBehindAppBar: true,
floatingActionButton: const DownloadStatusFloatingButton(),
floatingActionButton: IgnorePointer(
ignoring: !showingControls,
child: AnimatedOpacity(
opacity: showingControls ? 1.0 : 0.0,
duration: Durations.short2,
child: const DownloadStatusFloatingButton(),
),
),
body: Stack(
children: [
PhotoViewGallery.builder(
@@ -127,13 +127,18 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
if (exifInfo == null) {
return null;
}

final fNumber = exifInfo.fNumber.isNotEmpty ? 'ƒ/${exifInfo.fNumber}' : null;
final exposureTime = exifInfo.exposureTime.isNotEmpty ? exifInfo.exposureTime : null;
final focalLength = exifInfo.focalLength.isNotEmpty ? '${exifInfo.focalLength} mm' : null;
final iso = exifInfo.iso != null ? 'ISO ${exifInfo.iso}' : null;
return [exposureTime, iso].where((spec) => spec != null && spec.isNotEmpty).join(_kSeparator);
}

return [fNumber, exposureTime, focalLength, iso].where((spec) => spec != null && spec.isNotEmpty).join(_kSeparator);
String? _getLensInfoSubtitle(ExifInfo? exifInfo) {
if (exifInfo == null) {
return null;
}
final fNumber = exifInfo.fNumber.isNotEmpty ? 'ƒ/${exifInfo.fNumber}' : null;
final focalLength = exifInfo.focalLength.isNotEmpty ? '${exifInfo.focalLength} mm' : null;
return [fNumber, focalLength].where((spec) => spec != null && spec.isNotEmpty).join(_kSeparator);
}

Future<void> _editDateTime(BuildContext context, WidgetRef ref) async {
@@ -141,20 +146,20 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
}

Widget _buildAppearsInList(WidgetRef ref, BuildContext context) {
final aseet = ref.watch(currentAssetNotifier);
if (aseet == null) {
final asset = ref.watch(currentAssetNotifier);
if (asset == null) {
return const SizedBox.shrink();
}

if (!aseet.hasRemote) {
if (!asset.hasRemote) {
return const SizedBox.shrink();
}

String? remoteAssetId;
if (aseet is RemoteAsset) {
remoteAssetId = aseet.id;
} else if (aseet is LocalAsset) {
remoteAssetId = aseet.remoteAssetId;
if (asset is RemoteAsset) {
remoteAssetId = asset.id;
} else if (asset is LocalAsset) {
remoteAssetId = asset.remoteAssetId;
}

if (remoteAssetId == null) {
@@ -217,6 +222,7 @@ class _AssetDetailBottomSheet extends ConsumerWidget {

final exifInfo = ref.watch(currentAssetExifProvider).valueOrNull;
final cameraTitle = _getCameraInfoTitle(exifInfo);
final lensTitle = exifInfo?.lens != null && exifInfo!.lens!.isNotEmpty ? exifInfo.lens : null;
final isOwner = ref.watch(currentUserProvider)?.id == (asset is RemoteAsset ? asset.ownerId : null);

// Build file info tile based on asset type
@@ -287,12 +293,23 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
_SheetTile(
title: cameraTitle,
titleStyle: context.textTheme.labelLarge,
leading: Icon(Icons.camera_outlined, size: 24, color: context.textTheme.labelLarge?.color),
leading: Icon(Icons.camera_alt_outlined, size: 24, color: context.textTheme.labelLarge?.color),
subtitle: _getCameraInfoSubtitle(exifInfo),
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
),
),
// Lens info
if (lensTitle != null)
_SheetTile(
title: lensTitle,
titleStyle: context.textTheme.labelLarge,
leading: Icon(Icons.camera_outlined, size: 24, color: context.textTheme.labelLarge?.color),
subtitle: _getLensInfoSubtitle(exifInfo),
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
),
),
// Appears in (Albums)
_buildAppearsInList(ref, context),
// padding at the bottom to avoid cut-off

@@ -86,13 +86,9 @@ class _BaseDraggableScrollableSheetState extends ConsumerState<BaseBottomSheet>
SliverToBoxAdapter(
child: Column(
children: [
SizedBox(
height: 115,
child: ListView(
shrinkWrap: true,
scrollDirection: Axis.horizontal,
children: widget.actions,
),
SingleChildScrollView(
scrollDirection: Axis.horizontal,
child: Row(crossAxisAlignment: CrossAxisAlignment.start, children: widget.actions),
),
const Divider(indent: 16, endIndent: 16),
const SizedBox(height: 16),
208 mobile/lib/presentation/widgets/search/quick_date_picker.dart (new file)
@@ -0,0 +1,208 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';

sealed class DateFilterInputModel {
DateTimeRange<DateTime> asDateTimeRange();

String asHumanReadable(BuildContext context) {
// General implementation for arbitrary date and time ranges
// If date range is less than 24 hours, set the end date to the end of the day
final date = asDateTimeRange();
if (date.end.difference(date.start).inHours < 24) {
return DateFormat.yMMMd().format(date.start.toLocal());
} else {
return 'search_filter_date_interval'.t(
context: context,
args: {
"start": DateFormat.yMMMd().format(date.start.toLocal()),
"end": DateFormat.yMMMd().format(date.end.toLocal()),
},
);
}
}
}

class RecentMonthRangeFilter extends DateFilterInputModel {
final int monthDelta;
RecentMonthRangeFilter(this.monthDelta);

@override
DateTimeRange<DateTime> asDateTimeRange() {
final now = DateTime.now();
// Note that DateTime's constructor properly handles month overflow.
final from = DateTime(now.year, now.month - monthDelta, 1);
return DateTimeRange<DateTime>(start: from, end: now);
}

@override
String asHumanReadable(BuildContext context) {
return 'last_months'.t(context: context, args: {"count": monthDelta.toString()});
}
}
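Editor's note (not part of the diff): the comment above relies on Dart's DateTime constructor normalizing out-of-range month values, so the "last N months" range is safe across year boundaries. A small sketch illustrating that behaviour:

void main() {
  // Three months before January 2025: month value 1 - 3 = -2 normalizes to October 2024.
  final from = DateTime(2025, 1 - 3, 1);
  assert(from == DateTime(2024, 10, 1));
  print(from); // 2024-10-01 00:00:00.000
}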
class YearFilter extends DateFilterInputModel {
final int year;
YearFilter(this.year);

@override
DateTimeRange<DateTime> asDateTimeRange() {
final now = DateTime.now();
final from = DateTime(year, 1, 1);

if (now.year == year) {
// To not go beyond today if the user picks the current year
return DateTimeRange<DateTime>(start: from, end: now);
}

final to = DateTime(year, 12, 31, 23, 59, 59);
return DateTimeRange<DateTime>(start: from, end: to);
}

@override
String asHumanReadable(BuildContext context) {
return 'in_year'.tr(namedArgs: {"year": year.toString()});
}
}

class CustomDateFilter extends DateFilterInputModel {
final DateTime start;
final DateTime end;

CustomDateFilter(this.start, this.end);

factory CustomDateFilter.fromRange(DateTimeRange<DateTime> range) {
return CustomDateFilter(range.start, range.end);
}

@override
DateTimeRange<DateTime> asDateTimeRange() {
return DateTimeRange<DateTime>(start: start, end: end);
}
}

enum _QuickPickerType { last1Month, last3Months, last9Months, year, custom }

class QuickDatePicker extends HookWidget {
QuickDatePicker({super.key, required this.currentInput, required this.onSelect, required this.onRequestPicker})
: _selection = _selectionFromModel(currentInput),
_initialYear = _initialYearFromModel(currentInput);

final Function() onRequestPicker;
final Function(DateFilterInputModel range) onSelect;

final DateFilterInputModel? currentInput;
final _QuickPickerType? _selection;
final int _initialYear;

// Generate a list of recent years from 2000 to the current year (including the current one)
final List<int> _recentYears = List.generate(1 + DateTime.now().year - 2000, (index) {
return index + 2000;
});

static int _initialYearFromModel(DateFilterInputModel? model) {
return model?.asDateTimeRange().start.year ?? DateTime.now().year;
}

static _QuickPickerType? _selectionFromModel(DateFilterInputModel? model) {
if (model is RecentMonthRangeFilter) {
return switch (model.monthDelta) {
1 => _QuickPickerType.last1Month,
3 => _QuickPickerType.last3Months,
9 => _QuickPickerType.last9Months,
_ => _QuickPickerType.custom,
};
} else if (model is YearFilter) {
return _QuickPickerType.year;
} else if (model is CustomDateFilter) {
return _QuickPickerType.custom;
}
return null;
}

Text _monthLabel(BuildContext context, int monthsFromNow) =>
const Text('last_months').t(context: context, args: {"count": monthsFromNow.toString()});

Widget _yearPicker(BuildContext context) {
final size = MediaQuery.of(context).size;
return Row(
children: [
const Text("in_year_selector").tr(),
const SizedBox(width: 15),
Expanded(
child: DropdownMenu(
initialSelection: _initialYear,
menuStyle: MenuStyle(maximumSize: WidgetStateProperty.all(Size(size.width, size.height * 0.5))),
dropdownMenuEntries: _recentYears.map((e) => DropdownMenuEntry(value: e, label: e.toString())).toList(),
onSelected: (year) {
if (year == null) return;
onSelect(YearFilter(year));
},
),
),
],
);
}

// We want the exact date picker to always be selectable.
// Even if it's already toggled it should always open the full date picker, RadioListTiles don't do that by default
// so we wrap it in a InkWell
Widget _exactPicker(BuildContext context) {
final hasPreviousInput = currentInput != null && currentInput is CustomDateFilter;

return InkWell(
onTap: onRequestPicker,
child: IgnorePointer(
ignoring: true,
child: RadioListTile(
title: const Text('pick_custom_range').tr(),
subtitle: hasPreviousInput ? Text(currentInput!.asHumanReadable(context)) : null,
secondary: hasPreviousInput ? const Icon(Icons.edit) : null,
value: _QuickPickerType.custom,
toggleable: true,
),
),
);
}

@override
Widget build(BuildContext context) {
return Padding(
padding: EdgeInsets.only(bottom: MediaQuery.of(context).viewInsets.bottom),
child: Scrollbar(
// Depending on the screen size the last option might get cut off
// Add a clear visual cue that there are more options when scrolling
// When the screen size is large enough the scrollbar is hidden automatically
trackVisibility: true,
thumbVisibility: true,
child: SingleChildScrollView(
child: RadioGroup(
onChanged: (value) {
if (value == null) return;
final _ = switch (value) {
_QuickPickerType.custom => onRequestPicker(),
_QuickPickerType.last1Month => onSelect(RecentMonthRangeFilter(1)),
_QuickPickerType.last3Months => onSelect(RecentMonthRangeFilter(3)),
_QuickPickerType.last9Months => onSelect(RecentMonthRangeFilter(9)),
// When a year is selected the combobox triggers onSelect() on its own.
// Here we handle the radio button being selected which can only ever be the initial year
_QuickPickerType.year => onSelect(YearFilter(_initialYear)),
};
},
groupValue: _selection,
child: Column(
children: [
RadioListTile(title: _monthLabel(context, 1), value: _QuickPickerType.last1Month, toggleable: true),
RadioListTile(title: _monthLabel(context, 3), value: _QuickPickerType.last3Months, toggleable: true),
RadioListTile(title: _monthLabel(context, 9), value: _QuickPickerType.last9Months, toggleable: true),
RadioListTile(title: _yearPicker(context), value: _QuickPickerType.year, toggleable: true),
_exactPicker(context),
],
),
),
),
),
);
}
}
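Editor's note (not part of the diff): a minimal usage sketch of the model classes above, assuming the same imports and a mounted BuildContext; it only shows how a quick-pick selection resolves to the concrete range the search filter consumes.

void describeSelection(BuildContext context, DateFilterInputModel selection) {
  final range = selection.asDateTimeRange();
  // e.g. YearFilter(2023) covers 2023-01-01 through 2023-12-31 23:59:59
  debugPrint('${selection.asHumanReadable(context)}: ${range.start} - ${range.end}');
}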
@@ -1,3 +1,4 @@
import 'package:collection/collection.dart';
import 'package:immich_mobile/models/activities/activity.model.dart';
import 'package:immich_mobile/providers/activity_service.provider.dart';
import 'package:immich_mobile/providers/activity_statistics.provider.dart';
@@ -16,13 +17,20 @@ class AlbumActivity extends _$AlbumActivity {

Future<void> removeActivity(String id) async {
if (await ref.watch(activityServiceProvider).removeActivity(id)) {
final activities = state.valueOrNull ?? [];
final removedActivity = activities.firstWhere((a) => a.id == id);
activities.remove(removedActivity);
state = AsyncData(activities);
// Decrement activity count only for comments
final removedActivity = _removeFromState(id);
if (removedActivity == null) {
return;
}

if (assetId != null) {
ref.read(albumActivityProvider(albumId).notifier)._removeFromState(id);
}

if (removedActivity.type == ActivityType.comment) {
ref.watch(activityStatisticsProvider(albumId, assetId).notifier).removeActivity();
if (assetId != null) {
ref.watch(activityStatisticsProvider(albumId).notifier).removeActivity();
}
}
}
}
@@ -30,8 +38,10 @@ class AlbumActivity extends _$AlbumActivity {
Future<void> addLike() async {
final activity = await ref.watch(activityServiceProvider).addActivity(albumId, ActivityType.like, assetId: assetId);
if (activity.hasValue) {
final activities = state.asData?.value ?? [];
state = AsyncData([...activities, activity.requireValue]);
_addToState(activity.requireValue);
if (assetId != null) {
ref.read(albumActivityProvider(albumId).notifier)._addToState(activity.requireValue);
}
}
}

@@ -41,8 +51,10 @@ class AlbumActivity extends _$AlbumActivity {
.addActivity(albumId, ActivityType.comment, assetId: assetId, comment: comment);

if (activity.hasValue) {
final activities = state.valueOrNull ?? [];
state = AsyncData([...activities, activity.requireValue]);
_addToState(activity.requireValue);
if (assetId != null) {
ref.read(albumActivityProvider(albumId).notifier)._addToState(activity.requireValue);
}
ref.watch(activityStatisticsProvider(albumId, assetId).notifier).addActivity();
// The previous addActivity call would increase the count of an asset if assetId != null
// To also increase the activity count of the album, calling it once again with assetId set to null
@@ -51,6 +63,29 @@ class AlbumActivity extends _$AlbumActivity {
}
}
}

void _addToState(Activity activity) {
final activities = state.valueOrNull ?? [];
if (activities.any((a) => a.id == activity.id)) {
return;
}
state = AsyncData([...activities, activity]);
}

Activity? _removeFromState(String id) {
final activities = state.valueOrNull;
if (activities == null) {
return null;
}
final activity = activities.firstWhereOrNull((a) => a.id == id);
if (activity == null) {
return null;
}

final updated = [...activities]..remove(activity);
state = AsyncData(updated);
return activity;
}
}

/// Mock class for testing
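Editor's note (not part of the diff): _addToState guards on the activity id so that applying the same add twice, for example via the mirrored update into the album-scoped provider above, leaves the list unchanged. The core list update in isolation, with a hypothetical helper name:

List<Activity> addUnique(List<Activity> current, Activity activity) {
  // No-op if an activity with the same id is already present.
  if (current.any((a) => a.id == activity.id)) {
    return current;
  }
  return [...current, activity];
}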
2 mobile/lib/providers/activity.provider.g.dart (generated)
@@ -6,7 +6,7 @@ part of 'activity.provider.dart';
// RiverpodGenerator
// **************************************************************************

String _$albumActivityHash() => r'3b0d7acee4d41c84b3f220784c3b904c83f836e6';
String _$albumActivityHash() => r'154e8ae98da3efc142369eae46d4005468fd67da';

/// Copied from Dart SDK
class _SystemHash {
@@ -2,6 +2,7 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/services/asset.service.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/remote_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';

@@ -13,6 +14,10 @@ final remoteAssetRepositoryProvider = Provider<RemoteAssetRepository>(
(ref) => RemoteAssetRepository(ref.watch(driftProvider)),
);

final trashedLocalAssetRepository = Provider<DriftTrashedLocalAssetRepository>(
(ref) => DriftTrashedLocalAssetRepository(ref.watch(driftProvider)),
);

final assetServiceProvider = Provider(
(ref) => AssetService(
remoteAssetRepository: ref.watch(remoteAssetRepositoryProvider),
@@ -10,11 +10,17 @@ import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/cancel.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/providers/infrastructure/storage.provider.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';

final syncStreamServiceProvider = Provider(
(ref) => SyncStreamService(
syncApiRepository: ref.watch(syncApiRepositoryProvider),
syncStreamRepository: ref.watch(syncStreamRepositoryProvider),
localAssetRepository: ref.watch(localAssetRepository),
trashedLocalAssetRepository: ref.watch(trashedLocalAssetRepository),
localFilesManager: ref.watch(localFilesManagerRepositoryProvider),
storageRepository: ref.watch(storageRepositoryProvider),
cancelChecker: ref.watch(cancellationProvider),
),
);
@@ -26,6 +32,9 @@ final syncStreamRepositoryProvider = Provider((ref) => SyncStreamRepository(ref.
final localSyncServiceProvider = Provider(
(ref) => LocalSyncService(
localAlbumRepository: ref.watch(localAlbumRepository),
trashedLocalAssetRepository: ref.watch(trashedLocalAssetRepository),
localFilesManager: ref.watch(localFilesManagerRepositoryProvider),
storageRepository: ref.watch(storageRepositoryProvider),
nativeSyncApi: ref.watch(nativeSyncApiProvider),
),
);
@@ -35,5 +44,6 @@ final hashServiceProvider = Provider(
localAlbumRepository: ref.watch(localAlbumRepository),
localAssetRepository: ref.watch(localAssetRepository),
nativeSyncApi: ref.watch(nativeSyncApiProvider),
trashedLocalAssetRepository: ref.watch(trashedLocalAssetRepository),
),
);
12 mobile/lib/providers/infrastructure/trash_sync.provider.dart (new file)
@@ -0,0 +1,12 @@
import 'package:async/async.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';

typedef TrashedAssetsCount = ({int total, int hashed});

final trashedAssetsCountProvider = StreamProvider<TrashedAssetsCount>((ref) {
final repo = ref.watch(trashedLocalAssetRepository);
final total$ = repo.watchCount();
final hashed$ = repo.watchHashedCount();
return StreamZip<int>([total$, hashed$]).map((values) => (total: values[0], hashed: values[1]));
});
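Editor's note (not part of the diff): StreamZip from package:async pairs the n-th event of each input stream, so a combined record is only emitted once both counts have produced a value. A self-contained sketch of the same combination:

import 'package:async/async.dart';

void main() async {
  final totals = Stream<int>.fromIterable([5, 6]);
  final hashed = Stream<int>.fromIterable([3, 4]);
  final combined = StreamZip<int>([totals, hashed]).map((values) => (total: values[0], hashed: values[1]));
  await for (final c in combined) {
    print('total=${c.total} hashed=${c.hashed}'); // total=5 hashed=3, then total=6 hashed=4
  }
}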
@@ -67,7 +67,7 @@ class ServerInfoNotifier extends StateNotifier<ServerInfo> {
return;
}

if (clientVersion < serverVersion) {
if (clientVersion < serverVersion && clientVersion.differenceType(serverVersion) != SemVerType.patch) {
state = state.copyWith(versionStatus: VersionStatus.clientOutOfDate);
return;
}
@@ -1,6 +1,6 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/services/trash.service.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/services/trash.service.dart';
import 'package:logging/logging.dart';

class TrashNotifier extends StateNotifier<bool> {
@@ -89,9 +89,16 @@ class AssetMediaRepository {
return null;
}

// titleAsync gets the correct original filename for some assets on iOS
// otherwise using the `entity.title` would return a random GUID
return await entity.titleAsync;
try {
// titleAsync gets the correct original filename for some assets on iOS
// otherwise using the `entity.title` would return a random GUID
final originalFilename = await entity.titleAsync;
// treat empty filename as missing
return originalFilename.isNotEmpty ? originalFilename : null;
} catch (e) {
_log.warning("Failed to get original filename for asset: $id. Error: $e");
return null;
}
}

// TODO: make this more efficient
@@ -1,13 +1,16 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/services/local_files_manager.service.dart';
import 'package:logging/logging.dart';

final localFilesManagerRepositoryProvider = Provider(
(ref) => LocalFilesManagerRepository(ref.watch(localFileManagerServiceProvider)),
);

class LocalFilesManagerRepository {
const LocalFilesManagerRepository(this._service);
LocalFilesManagerRepository(this._service);

final Logger _logger = Logger('SyncStreamService');
final LocalFilesManagerService _service;

Future<bool> moveToTrash(List<String> mediaUrls) async {
@@ -21,4 +24,26 @@ class LocalFilesManagerRepository {
Future<bool> requestManageMediaPermission() async {
return await _service.requestManageMediaPermission();
}

Future<bool> hasManageMediaPermission() async {
return await _service.hasManageMediaPermission();
}

Future<bool> manageMediaPermission() async {
return await _service.manageMediaPermission();
}

Future<List<String>> restoreAssetsFromTrash(Iterable<LocalAsset> assets) async {
final restoredIds = <String>[];
for (final asset in assets) {
_logger.info("Restoring from trash, localId: ${asset.id}, remoteId: ${asset.checksum}");
try {
await _service.restoreFromTrashById(asset.id, asset.type.index);
restoredIds.add(asset.id);
} catch (e) {
_logger.warning("Restoring failure: $e");
}
}
return restoredIds;
}
}
@@ -313,6 +313,7 @@ class AppRouter extends RootStackRouter {
settings: page,
pageBuilder: (_, __, ___) => child,
opaque: false,
transitionsBuilder: TransitionsBuilders.fadeIn,
),
),
),
@@ -77,6 +77,7 @@ class DeepLinkService {
"memory" => await _buildMemoryDeepLink(queryParams['id'] ?? ''),
"asset" => await _buildAssetDeepLink(queryParams['id'] ?? '', ref),
"album" => await _buildAlbumDeepLink(queryParams['id'] ?? ''),
"activity" => await _buildActivityDeepLink(queryParams['albumId'] ?? ''),
_ => null,
};

@@ -185,4 +186,18 @@ class DeepLinkService {
return AlbumViewerRoute(albumId: album.id);
}
}

Future<PageRouteInfo?> _buildActivityDeepLink(String albumId) async {
if (Store.isBetaTimelineEnabled == false) {
return null;
}

final album = await _betaRemoteAlbumService.get(albumId);

if (album == null || album.isActivityEnabled == false) {
return null;
}

return DriftActivitiesRoute(album: album);
}
}
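Editor's note (not part of the diff): the new "activity" branch reads an albumId query parameter. The scheme and host in the sketch below are placeholders; only the path segment and parameter name come from the handler above.

void main() {
  final uri = Uri.parse('app://example/activity?albumId=9c2d4a1e');
  print(uri.pathSegments.last); // activity
  print(uri.queryParameters['albumId']); // 9c2d4a1e
}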
@@ -6,6 +6,7 @@ final localFileManagerServiceProvider = Provider<LocalFilesManagerService>((ref)

class LocalFilesManagerService {
const LocalFilesManagerService();

static final Logger _logger = Logger('LocalFilesManager');
static const MethodChannel _channel = MethodChannel('file_trash');

@@ -27,6 +28,15 @@ class LocalFilesManagerService {
}
}

Future<bool> restoreFromTrashById(String mediaId, int type) async {
try {
return await _channel.invokeMethod('restoreFromTrash', {'mediaId': mediaId, 'type': type});
} catch (e, s) {
_logger.warning('Error restore file from trash by Id', e, s);
return false;
}
}

Future<bool> requestManageMediaPermission() async {
try {
return await _channel.invokeMethod('requestManageMediaPermission');
@@ -35,4 +45,22 @@ class LocalFilesManagerService {
return false;
}
}

Future<bool> hasManageMediaPermission() async {
try {
return await _channel.invokeMethod('hasManageMediaPermission');
} catch (e, s) {
_logger.warning('Error requesting manage media permission state', e, s);
return false;
}
}

Future<bool> manageMediaPermission() async {
try {
return await _channel.invokeMethod('manageMediaPermission');
} catch (e, s) {
_logger.warning('Error requesting manage media permission settings', e, s);
return false;
}
}
}
@@ -51,6 +51,11 @@ dynamic upgradeDto(dynamic value, String targetType) {
addDefault(value, 'ocr', false);
}
break;
case 'MemoriesResponse':
if (value is Map) {
addDefault(value, 'duration', 5);
}
break;
}
}
@@ -1,3 +1,5 @@
enum SemVerType { major, minor, patch }

class SemVer {
final int major;
final int minor;
@@ -15,8 +17,20 @@ class SemVer {
}

factory SemVer.fromString(String version) {
if (version.toLowerCase().startsWith("v")) {
version = version.substring(1);
}

final parts = version.split("-")[0].split('.');
return SemVer(major: int.parse(parts[0]), minor: int.parse(parts[1]), patch: int.parse(parts[2]));
if (parts.length != 3) {
throw FormatException('Invalid semantic version string: $version');
}

try {
return SemVer(major: int.parse(parts[0]), minor: int.parse(parts[1]), patch: int.parse(parts[2]));
} catch (e) {
throw FormatException('Invalid semantic version string: $version');
}
}

bool operator >(SemVer other) {
@@ -54,6 +68,20 @@ class SemVer {
return other is SemVer && other.major == major && other.minor == minor && other.patch == patch;
}

SemVerType? differenceType(SemVer other) {
if (major != other.major) {
return SemVerType.major;
}
if (minor != other.minor) {
return SemVerType.minor;
}
if (patch != other.patch) {
return SemVerType.patch;
}

return null;
}

@override
int get hashCode => major.hashCode ^ minor.hashCode ^ patch.hashCode;
}
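Editor's note (not part of the diff): with differenceType, the server-info check earlier in this changeset only flags the client as out of date when the gap is more than a patch release. A usage sketch with made-up version numbers:

void main() {
  final client = SemVer.fromString('v1.119.0');
  print(client.differenceType(SemVer.fromString('1.119.1'))); // SemVerType.patch -> no out-of-date banner
  print(client.differenceType(SemVer.fromString('1.120.0'))); // SemVerType.minor -> client flagged as out of date
}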
143 mobile/lib/widgets/activities/comment_bubble.dart (new file)
@@ -0,0 +1,143 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/datetime_extensions.dart';
import 'package:immich_mobile/models/activities/activity.model.dart';
import 'package:immich_mobile/providers/activity.provider.dart';
import 'package:immich_mobile/providers/activity_service.provider.dart';
import 'package:immich_mobile/providers/image/immich_remote_thumbnail_provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/widgets/activities/dismissible_activity.dart';
import 'package:immich_mobile/widgets/common/user_circle_avatar.dart';

class CommentBubble extends ConsumerWidget {
final Activity activity;
final bool isAssetActivity;

const CommentBubble({super.key, required this.activity, this.isAssetActivity = false});

@override
Widget build(BuildContext context, WidgetRef ref) {
final user = ref.watch(currentUserProvider);
final album = ref.watch(currentRemoteAlbumProvider)!;
final isOwn = activity.user.id == user?.id;
final canDelete = isOwn || album.ownerId == user?.id;
final showThumbnail = !isAssetActivity && activity.assetId != null && activity.assetId!.isNotEmpty;
final isLike = activity.type == ActivityType.like;
final bgColor = isOwn ? context.colorScheme.primaryContainer : context.colorScheme.surfaceContainer;

final activityNotifier = ref.read(
albumActivityProvider(album.id, isAssetActivity ? activity.assetId : null).notifier,
);

Future<void> openAssetViewer() async {
final activityService = ref.read(activityServiceProvider);
final route = await activityService.buildAssetViewerRoute(activity.assetId!, ref);
if (route != null) await context.pushRoute(route);
}

// avatar (hidden for own messages)
Widget avatar = const SizedBox.shrink();
if (!isOwn) {
avatar = UserCircleAvatar(user: activity.user, size: 28, radius: 14);
}

// Thumbnail with tappable behavior and optional heart overlay
Widget? thumbnail;
if (showThumbnail) {
thumbnail = ConstrainedBox(
constraints: const BoxConstraints(maxWidth: 150, maxHeight: 150),
child: Stack(
children: [
GestureDetector(
onTap: openAssetViewer,
child: ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(10)),
child: Image(
image: ImmichRemoteThumbnailProvider(assetId: activity.assetId!),
fit: BoxFit.cover,
),
),
),
if (isLike)
Positioned(
right: 6,
bottom: 6,
child: Container(
padding: const EdgeInsets.all(4),
decoration: BoxDecoration(color: Colors.white.withValues(alpha: 0.7), shape: BoxShape.circle),
child: Icon(Icons.favorite, color: Colors.red[600], size: 18),
),
),
],
),
);
}

// Likes widget
Widget? likes;
if (isLike && !showThumbnail) {
likes = Container(
padding: const EdgeInsets.all(8),
decoration: BoxDecoration(color: Colors.white.withValues(alpha: 0.7), shape: BoxShape.circle),
child: Icon(Icons.favorite, color: Colors.red[600], size: 18),
);
}

// Comment bubble, comment-only
Widget? commentBubble;
if (activity.comment != null && activity.comment!.isNotEmpty) {
commentBubble = ConstrainedBox(
constraints: BoxConstraints(maxWidth: MediaQuery.of(context).size.width * 0.5),
child: Container(
padding: const EdgeInsets.all(10),
decoration: BoxDecoration(color: bgColor, borderRadius: const BorderRadius.all(Radius.circular(12))),
child: Text(
activity.comment ?? '',
style: context.textTheme.bodyLarge?.copyWith(color: context.colorScheme.onSurface),
),
),
);
}

// Combined content widgets
final List<Widget> contentChildren = [thumbnail, likes, commentBubble].whereType<Widget>().toList();

return DismissibleActivity(
onDismiss: canDelete ? (id) async => await activityNotifier.removeActivity(id) : null,
activity.id,
Align(
alignment: isOwn ? Alignment.centerRight : Alignment.centerLeft,
child: ConstrainedBox(
constraints: BoxConstraints(maxWidth: MediaQuery.of(context).size.width * 0.86),
child: Container(
margin: const EdgeInsets.symmetric(vertical: 6, horizontal: 10),
child: Row(
mainAxisSize: MainAxisSize.min,
crossAxisAlignment: CrossAxisAlignment.start,
children: [
if (!isOwn) ...[avatar, const SizedBox(width: 8)],
// Content column
Column(
crossAxisAlignment: isOwn ? CrossAxisAlignment.end : CrossAxisAlignment.start,
children: [
...contentChildren.map((w) => Padding(padding: const EdgeInsets.only(bottom: 8.0), child: w)),
Text(
'${activity.user.name} • ${activity.createdAt.timeAgo()}',
style: context.textTheme.labelMedium?.copyWith(
color: context.colorScheme.onSurface.withValues(alpha: 0.6),
),
),
],
),
if (isOwn) const SizedBox(width: 8),
],
),
),
),
),
);
}
}
@@ -21,6 +21,7 @@ import 'package:immich_mobile/providers/gallery_permission.provider.dart';
import 'package:immich_mobile/providers/oauth.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/websocket.provider.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/utils/provider_utils.dart';
import 'package:immich_mobile/utils/url_helper.dart';
@@ -177,6 +178,55 @@ class LoginForm extends HookConsumerWidget {
}
}

getManageMediaPermission() async {
final hasPermission = await ref.read(localFilesManagerRepositoryProvider).hasManageMediaPermission();
if (!hasPermission) {
await showDialog(
context: context,
builder: (BuildContext context) {
return AlertDialog(
shape: const RoundedRectangleBorder(borderRadius: BorderRadius.all(Radius.circular(10))),
elevation: 5,
title: Text(
'manage_media_access_title',
style: TextStyle(fontSize: 16, fontWeight: FontWeight.bold, color: context.primaryColor),
).tr(),
content: SingleChildScrollView(
child: ListBody(
children: [
const Text('manage_media_access_subtitle', style: TextStyle(fontSize: 14)).tr(),
const SizedBox(height: 4),
const Text('manage_media_access_rationale', style: TextStyle(fontSize: 12)).tr(),
],
),
),
actions: [
TextButton(
onPressed: () => Navigator.of(context).pop(),
child: Text(
'cancel'.tr(),
style: TextStyle(fontWeight: FontWeight.w600, color: context.primaryColor),
),
),
TextButton(
onPressed: () {
ref.read(localFilesManagerRepositoryProvider).requestManageMediaPermission();
Navigator.of(context).pop();
},
child: Text(
'manage_media_access_settings'.tr(),
style: TextStyle(fontWeight: FontWeight.w600, color: context.primaryColor),
),
),
],
);
},
);
}
}

bool isSyncRemoteDeletionsMode() => Platform.isAndroid && Store.get(StoreKey.manageLocalMediaAndroid, false);

login() async {
TextInput.finishAutofillContext();

@@ -194,6 +244,9 @@ class LoginForm extends HookConsumerWidget {
final isBeta = Store.isBetaTimelineEnabled;
if (isBeta) {
await ref.read(galleryPermissionNotifier.notifier).requestGalleryPermission();
if (isSyncRemoteDeletionsMode()) {
await getManageMediaPermission();
}
unawaited(handleSyncFlow());
ref.read(websocketProvider.notifier).connect();
unawaited(context.replaceRoute(const TabShellRoute()));
@@ -293,6 +346,9 @@ class LoginForm extends HookConsumerWidget {
}
if (isBeta) {
await ref.read(galleryPermissionNotifier.notifier).requestGalleryPermission();
if (isSyncRemoteDeletionsMode()) {
await getManageMediaPermission();
}
unawaited(handleSyncFlow());
unawaited(context.replaceRoute(const TabShellRoute()));
return;
@@ -6,7 +6,7 @@ class FilterBottomSheetScaffold extends StatelessWidget {
const FilterBottomSheetScaffold({
super.key,
required this.child,
required this.onSearch,
this.onSearch,
required this.onClear,
required this.title,
this.expanded,
@@ -15,7 +15,7 @@ class FilterBottomSheetScaffold extends StatelessWidget {
final bool? expanded;
final String title;
final Widget child;
final Function() onSearch;
final Function()? onSearch;
final Function() onClear;

@override
@@ -48,15 +48,16 @@ class FilterBottomSheetScaffold extends StatelessWidget {
},
child: const Text('clear').tr(),
),
const SizedBox(width: 8),
ElevatedButton(
key: const Key('search_filter_apply'),
onPressed: () {
onSearch();
context.pop();
},
child: const Text('search_filter_apply').tr(),
),
if (onSearch != null) const SizedBox(width: 8),
if (onSearch != null)
ElevatedButton(
key: const Key('search_filter_apply'),
onPressed: () {
onSearch!();
context.pop();
},
child: const Text('search_filter_apply').tr(),
),
],
),
),
@@ -8,8 +8,8 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/services/log.service.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/providers/infrastructure/readonly_mode.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/utils/hooks/app_settings_update_hook.dart';
@@ -17,6 +17,7 @@ import 'package:immich_mobile/utils/http_ssl_options.dart';
import 'package:immich_mobile/widgets/settings/beta_timeline_list_tile.dart';
import 'package:immich_mobile/widgets/settings/custom_proxy_headers_settings/custom_proxy_headers_settings.dart';
import 'package:immich_mobile/widgets/settings/local_storage_settings.dart';
import 'package:immich_mobile/widgets/settings/settings_action_tile.dart';
import 'package:immich_mobile/widgets/settings/settings_slider_list_tile.dart';
import 'package:immich_mobile/widgets/settings/settings_sub_page_scaffold.dart';
import 'package:immich_mobile/widgets/settings/settings_switch_list_tile.dart';
@@ -25,12 +26,15 @@ import 'package:logging/logging.dart';

class AdvancedSettings extends HookConsumerWidget {
const AdvancedSettings({super.key});

@override
Widget build(BuildContext context, WidgetRef ref) {
bool isLoggedIn = ref.read(currentUserProvider) != null;

final advancedTroubleshooting = useAppSettingsState(AppSettingsEnum.advancedTroubleshooting);
final manageLocalMediaAndroid = useAppSettingsState(AppSettingsEnum.manageLocalMediaAndroid);
final isManageMediaSupported = useState(false);
final manageMediaAndroidPermission = useState(false);
final levelId = useAppSettingsState(AppSettingsEnum.logLevel);
final preferRemote = useAppSettingsState(AppSettingsEnum.preferRemoteImage);
final allowSelfSignedSSLCert = useAppSettingsState(AppSettingsEnum.allowSelfSignedSSLCert);
@@ -51,6 +55,18 @@ class AdvancedSettings extends HookConsumerWidget {
return false;
}

useEffect(() {
() async {
isManageMediaSupported.value = await checkAndroidVersion();
if (isManageMediaSupported.value) {
manageMediaAndroidPermission.value = await ref
.read(localFilesManagerRepositoryProvider)
.hasManageMediaPermission();
}
}();
return null;
}, []);

final advancedSettings = [
SettingsSwitchListTile(
enabled: true,
@@ -58,11 +74,10 @@ class AdvancedSettings extends HookConsumerWidget {
title: "advanced_settings_troubleshooting_title".tr(),
subtitle: "advanced_settings_troubleshooting_subtitle".tr(),
),
FutureBuilder<bool>(
future: checkAndroidVersion(),
builder: (context, snapshot) {
if (snapshot.hasData && snapshot.data == true) {
return SettingsSwitchListTile(
if (isManageMediaSupported.value)
Column(
children: [
SettingsSwitchListTile(
enabled: true,
valueNotifier: manageLocalMediaAndroid,
title: "advanced_settings_sync_remote_deletions_title".tr(),
@@ -71,14 +86,24 @@ class AdvancedSettings extends HookConsumerWidget {
if (value) {
final result = await ref.read(localFilesManagerRepositoryProvider).requestManageMediaPermission();
manageLocalMediaAndroid.value = result;
manageMediaAndroidPermission.value = result;
}
},
);
} else {
return const SizedBox.shrink();
}
},
),
),
SettingsActionTile(
title: "manage_media_access_title".tr(),
statusText: manageMediaAndroidPermission.value ? "allowed".tr() : "not_allowed".tr(),
subtitle: "manage_media_access_rationale".tr(),
statusColor: manageLocalMediaAndroid.value && !manageMediaAndroidPermission.value
? const Color.fromARGB(255, 243, 188, 106)
: null,
onActionTap: () async {
final result = await ref.read(localFilesManagerRepositoryProvider).manageMediaPermission();
manageMediaAndroidPermission.value = result;
},
),
],
),
SettingsSliderListTile(
text: "advanced_settings_log_level_title".tr(namedArgs: {'level': logLevel}),
valueNotifier: levelId,
@@ -3,14 +3,18 @@ import 'dart:io';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/memory.provider.dart';
import 'package:immich_mobile/providers/infrastructure/storage.provider.dart';
import 'package:immich_mobile/providers/infrastructure/trash_sync.provider.dart';
import 'package:immich_mobile/providers/sync_status.provider.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/widgets/settings/beta_sync_settings/entity_count_tile.dart';
import 'package:path/path.dart' as path;
import 'package:path_provider/path_provider.dart';
@@ -229,6 +233,7 @@ class _SyncStatsCounts extends ConsumerWidget {
final localAlbumService = ref.watch(localAlbumServiceProvider);
final remoteAlbumService = ref.watch(remoteAlbumServiceProvider);
final memoryService = ref.watch(driftMemoryServiceProvider);
final appSettingsService = ref.watch(appSettingsServiceProvider);

Future<List<dynamic>> loadCounts() async {
final assetCounts = assetService.getAssetCounts();
@@ -351,6 +356,44 @@ class _SyncStatsCounts extends ConsumerWidget {
],
),
),
// To be removed once the experimental feature is stable
if (CurrentPlatform.isAndroid &&
appSettingsService.getSetting<bool>(AppSettingsEnum.manageLocalMediaAndroid)) ...[
_SectionHeaderText(text: "trash".t(context: context)),
Consumer(
builder: (context, ref, _) {
final counts = ref.watch(trashedAssetsCountProvider);
return counts.when(
data: (c) => Padding(
padding: const EdgeInsets.fromLTRB(16, 8, 16, 16),
child: Flex(
direction: Axis.horizontal,
mainAxisAlignment: MainAxisAlignment.spaceBetween,
spacing: 8.0,
children: [
Expanded(
child: EntitiyCountTile(
label: "local".t(context: context),
count: c.total,
icon: Icons.delete_outline,
),
),
Expanded(
child: EntitiyCountTile(
label: "hashed_assets".t(context: context),
count: c.hashed,
icon: Icons.tag,
),
),
],
),
),
loading: () => const CircularProgressIndicator(),
error: (e, st) => Text('Error: $e'),
);
},
),
],
],
);
},
73 mobile/lib/widgets/settings/settings_action_tile.dart (new file)
@@ -0,0 +1,73 @@
import 'package:flutter/material.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';

class SettingsActionTile extends StatelessWidget {
const SettingsActionTile({
super.key,
required this.title,
required this.subtitle,
required this.onActionTap,
this.statusText,
this.statusColor,
this.contentPadding,
this.titleStyle,
this.subtitleStyle,
});

final String title;
final String subtitle;
final String? statusText;
final Color? statusColor;
final VoidCallback onActionTap;
final EdgeInsets? contentPadding;
final TextStyle? titleStyle;
final TextStyle? subtitleStyle;

@override
Widget build(BuildContext context) {
final theme = Theme.of(context);

return ListTile(
isThreeLine: true,
onTap: onActionTap,
titleAlignment: ListTileTitleAlignment.center,
title: Row(
children: [
Expanded(
child: Text(
title,
style: titleStyle ?? theme.textTheme.bodyLarge?.copyWith(fontWeight: FontWeight.w500, height: 1.5),
),
),
if (statusText != null)
Padding(
padding: const EdgeInsets.only(left: 8),
child: Chip(
label: Text(
statusText!,
style: theme.textTheme.labelMedium?.copyWith(
color: statusColor ?? theme.colorScheme.onSurfaceVariant,
),
),
backgroundColor: theme.colorScheme.surface,
side: BorderSide(color: statusColor ?? theme.colorScheme.outlineVariant),
shape: StadiumBorder(side: BorderSide(color: statusColor ?? theme.colorScheme.outlineVariant)),
materialTapTargetSize: MaterialTapTargetSize.shrinkWrap,
visualDensity: VisualDensity.compact,
padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 0),
),
),
],
),
subtitle: Padding(
padding: const EdgeInsets.only(top: 4.0, right: 18.0),
child: Text(
subtitle,
style: subtitleStyle ?? theme.textTheme.bodyMedium?.copyWith(color: theme.colorScheme.onSurfaceSecondary),
),
),
trailing: Icon(Icons.arrow_forward_ios, size: 16, color: theme.colorScheme.onSurfaceVariant),
contentPadding: contentPadding ?? const EdgeInsets.symmetric(horizontal: 20.0, vertical: 8.0),
);
}
}
185 mobile/mise.toml (new file)
@@ -0,0 +1,185 @@
[tools]
flutter = "3.35.7"

[tools."github:CQLabs/homebrew-dcm"]
version = "1.30.0"
bin = "dcm"
postinstall = "chmod +x $MISE_TOOL_INSTALL_PATH/dcm"

[tasks."codegen:dart"]
alias = "codegen"
description = "Execute build_runner to auto-generate dart code"
sources = [
"pubspec.yaml",
"build.yaml",
"lib/**/*.dart",
"infrastructure/**/*.drift",
]
outputs = { auto = true }
run = "dart run build_runner build --delete-conflicting-outputs"

[tasks."codegen:pigeon"]
alias = "pigeon"
description = "Generate pigeon platform code"
depends = [
"pigeon:native-sync",
"pigeon:thumbnail",
"pigeon:background-worker",
"pigeon:background-worker-lock",
"pigeon:connectivity",
]

[tasks."codegen:translation"]
alias = "translation"
description = "Generate translations from i18n JSONs"
run = [
{ task = "//i18n:format-fix" },
{ tasks = [
"i18n:loader",
"i18n:keys",
] },
]

[tasks."codegen:app-icon"]
description = "Generate app icons"
run = "flutter pub run flutter_launcher_icons:main"

[tasks."codegen:splash"]
description = "Generate splash screen"
run = "flutter pub run flutter_native_splash:create"

[tasks.test]
description = "Run mobile tests"
run = "flutter test"

[tasks.lint]
description = "Analyze Dart code"
depends = ["analyze:dart", "analyze:dcm"]

[tasks."lint-fix"]
description = "Auto-fix Dart code"
depends = ["analyze:fix:dart", "analyze:fix:dcm"]

[tasks.format]
description = "Format Dart code"
run = "dart format --set-exit-if-changed $(find lib -name '*.dart' -not \\( -name '*.g.dart' -o -name '*.drift.dart' -o -name '*.gr.dart' \\))"

[tasks."build:android"]
description = "Build Android release"
run = "flutter build appbundle"

[tasks."drift:migration"]
alias = "migration"
description = "Generate database migrations"
run = "dart run drift_dev make-migrations"


# Internal tasks
[tasks."pigeon:native-sync"]
description = "Generate native sync API pigeon code"
hide = true
sources = ["pigeon/native_sync_api.dart"]
outputs = [
"lib/platform/native_sync_api.g.dart",
"ios/Runner/Sync/Messages.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/sync/Messages.g.kt",
]
run = [
"dart run pigeon --input pigeon/native_sync_api.dart",
"dart format lib/platform/native_sync_api.g.dart",
]

[tasks."pigeon:thumbnail"]
description = "Generate thumbnail API pigeon code"
hide = true
sources = ["pigeon/thumbnail_api.dart"]
outputs = [
"lib/platform/thumbnail_api.g.dart",
"ios/Runner/Images/Thumbnails.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/images/Thumbnails.g.kt",
]
run = [
"dart run pigeon --input pigeon/thumbnail_api.dart",
"dart format lib/platform/thumbnail_api.g.dart",
]

[tasks."pigeon:background-worker"]
description = "Generate background worker API pigeon code"
hide = true
sources = ["pigeon/background_worker_api.dart"]
outputs = [
"lib/platform/background_worker_api.g.dart",
"ios/Runner/Background/BackgroundWorker.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorker.g.kt",
]
run = [
"dart run pigeon --input pigeon/background_worker_api.dart",
"dart format lib/platform/background_worker_api.g.dart",
]

[tasks."pigeon:background-worker-lock"]
description = "Generate background worker lock API pigeon code"
hide = true
sources = ["pigeon/background_worker_lock_api.dart"]
outputs = [
"lib/platform/background_worker_lock_api.g.dart",
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorkerLock.g.kt",
]
run = [
"dart run pigeon --input pigeon/background_worker_lock_api.dart",
"dart format lib/platform/background_worker_lock_api.g.dart",
]

[tasks."pigeon:connectivity"]
description = "Generate connectivity API pigeon code"
hide = true
|
||||
sources = ["pigeon/connectivity_api.dart"]
|
||||
outputs = [
|
||||
"lib/platform/connectivity_api.g.dart",
|
||||
"ios/Runner/Connectivity/Connectivity.g.swift",
|
||||
"android/app/src/main/kotlin/app/alextran/immich/connectivity/Connectivity.g.kt",
|
||||
]
|
||||
run = [
|
||||
"dart run pigeon --input pigeon/connectivity_api.dart",
|
||||
"dart format lib/platform/connectivity_api.g.dart",
|
||||
]
|
||||
|
||||
[tasks."i18n:loader"]
|
||||
description = "Generate i18n loader"
|
||||
hide = true
|
||||
sources = ["i18n/"]
|
||||
outputs = "lib/generated/codegen_loader.g.dart"
|
||||
run = [
|
||||
"dart run easy_localization:generate -S ../i18n",
|
||||
"dart format lib/generated/codegen_loader.g.dart",
|
||||
]
|
||||
|
||||
[tasks."i18n:keys"]
|
||||
description = "Generate i18n keys"
|
||||
hide = true
|
||||
sources = ["i18n/en.json"]
|
||||
outputs = "lib/generated/intl_keys.g.dart"
|
||||
run = [
|
||||
"dart run bin/generate_keys.dart",
|
||||
"dart format lib/generated/intl_keys.g.dart",
|
||||
]
|
||||
|
||||
[tasks."analyze:dart"]
|
||||
description = "Run Dart analysis"
|
||||
hide = true
|
||||
run = "dart analyze --fatal-infos"
|
||||
|
||||
[tasks."analyze:dcm"]
|
||||
description = "Run Dart Code Metrics"
|
||||
hide = true
|
||||
run = "dcm analyze lib --fatal-style --fatal-warnings"
|
||||
|
||||
[tasks."analyze:fix:dart"]
|
||||
description = "Auto-fix Dart analysis"
|
||||
hide = true
|
||||
run = "dart fix --apply"
|
||||
|
||||
[tasks."analyze:fix:dcm"]
|
||||
description = "Auto-fix Dart Code Metrics"
|
||||
hide = true
|
||||
run = "dcm fix lib"
|
||||
3
mobile/openapi/README.md
generated
3
mobile/openapi/README.md
generated
@@ -3,7 +3,7 @@ Immich API

This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

- API version: 2.2.1
- API version: 2.2.3
- Generator version: 7.8.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

@@ -407,6 +407,7 @@ Class | Method | HTTP request | Description
- [MemoriesUpdate](doc//MemoriesUpdate.md)
- [MemoryCreateDto](doc//MemoryCreateDto.md)
- [MemoryResponseDto](doc//MemoryResponseDto.md)
- [MemorySearchOrder](doc//MemorySearchOrder.md)
- [MemoryStatisticsResponseDto](doc//MemoryStatisticsResponseDto.md)
- [MemoryType](doc//MemoryType.md)
- [MemoryUpdateDto](doc//MemoryUpdateDto.md)
1
mobile/openapi/lib/api.dart
generated
1
mobile/openapi/lib/api.dart
generated
@@ -175,6 +175,7 @@ part 'model/memories_response.dart';
part 'model/memories_update.dart';
part 'model/memory_create_dto.dart';
part 'model/memory_response_dto.dart';
part 'model/memory_search_order.dart';
part 'model/memory_statistics_response_dto.dart';
part 'model/memory_type.dart';
part 'model/memory_update_dto.dart';
Some files were not shown because too many files have changed in this diff.