Compare commits

..

6 Commits

Author SHA1 Message Date
shenlong-tanwen
c8e6080ddc replace trigger with manual pruning 2025-11-08 01:23:13 +05:30
shenlong-tanwen
70300ad3d4 ignore failed uploads from being retried on timer 2025-11-06 21:18:35 +05:30
shenlong-tanwen
91fe3c8d96 fetch failed uploads from local asset upload entity 2025-11-06 20:54:09 +05:30
Alex
3b6e23fed7 Merge branch 'main' into fix/periodic-batch-enqueue 2025-11-05 13:29:28 -06:00
shenlong-tanwen
d37cae5dcd track asset upload status and sort upload queue 2025-11-05 18:00:37 +05:30
shenlong-tanwen
94044c98bf fix: enqueue assets in batches for uploads 2025-11-05 18:00:37 +05:30
512 changed files with 10462 additions and 43485 deletions

View File

@@ -21,7 +21,6 @@ services:
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
- ../plugins:/build/corePlugin
immich-web:
env_file: !reset []
immich-machine-learning:

10
.github/mise.toml vendored
View File

@@ -1,10 +0,0 @@
[tasks.install]
run = "pnpm install --filter github --frozen-lockfile"
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."

View File

@@ -165,7 +165,7 @@ jobs:
fi
- name: Publish Android Artifact
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk
@@ -188,8 +188,8 @@ jobs:
needs: pre-job
permissions:
contents: read
# Run on main branch or workflow_dispatch, or on PRs/other branches (build only, no upload)
if: ${{ !github.event.pull_request.head.repo.fork && fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
# Run on main branch or workflow_dispatch
if: ${{ !github.event.pull_request.head.repo.fork && fromJSON(needs.pre-job.outputs.should_run).mobile == true && github.ref == 'refs/heads/main' }}
runs-on: macos-latest
steps:
@@ -303,20 +303,12 @@ jobs:
APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
ENVIRONMENT: ${{ inputs.environment || 'development' }}
BUNDLE_ID_SUFFIX: ${{ inputs.environment == 'production' && '' || 'development' }}
GITHUB_REF: ${{ github.ref }}
working-directory: ./mobile/ios
run: |
# Only upload to TestFlight on main branch
if [[ "$GITHUB_REF" == "refs/heads/main" ]]; then
if [[ "$ENVIRONMENT" == "development" ]]; then
bundle exec fastlane gha_testflight_dev
else
bundle exec fastlane gha_release_prod
fi
if [[ "$ENVIRONMENT" == "development" ]]; then
bundle exec fastlane gha_testflight_dev
else
# Build only, no TestFlight upload for non-main branches
bundle exec fastlane gha_build_only
bundle exec fastlane gha_release_prod
fi
- name: Clean up keychain
@@ -325,7 +317,7 @@ jobs:
security delete-keychain build.keychain || true
- name: Upload IPA artifact
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: ios-release-ipa
path: mobile/ios/Runner.ipa

View File

@@ -84,7 +84,7 @@ jobs:
token: ${{ steps.token.outputs.token }}
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
@@ -105,7 +105,7 @@ jobs:
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
with:
flavor: |
latest=false

View File

@@ -35,7 +35,7 @@ jobs:
needs: [get_body, should_run]
if: ${{ needs.should_run.outputs.should_run == 'true' }}
container:
image: ghcr.io/immich-app/mdq:main@sha256:9c905a4ff69f00c4b2f98b40b6090ab3ab18d1a15ed1379733b8691aa1fcb271
image: ghcr.io/immich-app/mdq:main@sha256:6b8450bfc06770af1af66bce9bf2ced7d1d9b90df1a59fc4c83a17777a9f6723
outputs:
checked: ${{ steps.get_checkbox.outputs.checked }}
steps:

View File

@@ -57,7 +57,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -70,7 +70,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/autobuild@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -83,6 +83,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
with:
category: '/language:${{matrix.language}}'

View File

@@ -85,7 +85,7 @@ jobs:
run: pnpm build
- name: Upload build output
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: docs-build-output
path: docs/build/

View File

@@ -174,7 +174,7 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs'
run: 'mise run //deployment:tf apply'
run: 'mise run tf apply'
- name: Deploy Docs Subdomain Output
id: docs-output
@@ -186,7 +186,7 @@ jobs:
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs'
run: |
mise run //deployment:tf output -- -json | jq -r '
mise run tf output -- -json | jq -r '
"projectName=\(.pages_project_name.value)",
"subdomain=\(.immich_app_branch_subdomain.value)"
' >> $GITHUB_OUTPUT
@@ -211,7 +211,7 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs-release'
run: 'mise run //deployment:tf apply'
run: 'mise run tf apply'
- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0

View File

@@ -39,7 +39,7 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs'
run: 'mise run //deployment:tf destroy -- -refresh=false'
run: 'mise run tf destroy -- -refresh=false'
- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0

View File

@@ -62,7 +62,7 @@ jobs:
ref: main
- name: Install uv
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # v7.1.1
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
@@ -138,7 +138,7 @@ jobs:
persist-credentials: false
- name: Download APK
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: release-apk-signed
github-token: ${{ steps.generate-token.outputs.token }}

View File

@@ -1,170 +0,0 @@
name: Manage release PR
on:
workflow_dispatch:
push:
branches:
- main
concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true
permissions: {}
jobs:
bump:
runs-on: ubuntu-latest
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
ref: main
- name: Install uv
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Determine release type
id: bump-type
uses: ietf-tools/semver-action@c90370b2958652d71c06a3484129a4d423a6d8a8 # v1.11.0
with:
token: ${{ steps.generate-token.outputs.token }}
- name: Bump versions
env:
TYPE: ${{ steps.bump-type.outputs.bump }}
run: |
if [ "$TYPE" == "none" ]; then
exit 1 # TODO: Is there a cleaner way to abort the workflow?
fi
misc/release/pump-version.sh -s $TYPE -m true
- name: Manage Outline release document
id: outline
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
OUTLINE_API_KEY: ${{ secrets.OUTLINE_API_KEY }}
NEXT_VERSION: ${{ steps.bump-type.outputs.next }}
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
const fs = require('fs');
const outlineKey = process.env.OUTLINE_API_KEY;
const parentDocumentId = 'da856355-0844-43df-bd71-f8edce5382d9'
const collectionId = 'e2910656-714c-4871-8721-447d9353bd73';
const baseUrl = 'https://outline.immich.cloud';
const listResponse = await fetch(`${baseUrl}/api/documents.list`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({ parentDocumentId })
});
if (!listResponse.ok) {
throw new Error(`Outline list failed: ${listResponse.statusText}`);
}
const listData = await listResponse.json();
const allDocuments = listData.data || [];
const document = allDocuments.find(doc => doc.title === 'next');
let documentId;
let documentUrl;
let documentText;
if (!document) {
// Create new document
console.log('No existing document found. Creating new one...');
const notesTmpl = fs.readFileSync('misc/release/notes.tmpl', 'utf8');
const createResponse = await fetch(`${baseUrl}/api/documents.create`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
title: 'next',
text: notesTmpl,
collectionId: collectionId,
parentDocumentId: parentDocumentId,
publish: true
})
});
if (!createResponse.ok) {
throw new Error(`Failed to create document: ${createResponse.statusText}`);
}
const createData = await createResponse.json();
documentId = createData.data.id;
const urlId = createData.data.urlId;
documentUrl = `${baseUrl}/doc/next-${urlId}`;
documentText = createData.data.text || '';
console.log(`Created new document: ${documentUrl}`);
} else {
documentId = document.id;
const docPath = document.url;
documentUrl = `${baseUrl}${docPath}`;
documentText = document.text || '';
console.log(`Found existing document: ${documentUrl}`);
}
// Generate GitHub release notes
console.log('Generating GitHub release notes...');
const releaseNotesResponse = await github.rest.repos.generateReleaseNotes({
owner: context.repo.owner,
repo: context.repo.repo,
tag_name: `${process.env.NEXT_VERSION}`,
});
// Combine the content
const changelog = `
# ${process.env.NEXT_VERSION}
${documentText}
${releaseNotesResponse.data.body}
---
`
const existingChangelog = fs.existsSync('CHANGELOG.md') ? fs.readFileSync('CHANGELOG.md', 'utf8') : '';
fs.writeFileSync('CHANGELOG.md', changelog + existingChangelog, 'utf8');
core.setOutput('document_url', documentUrl);
- name: Create PR
id: create-pr
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
token: ${{ steps.generate-token.outputs.token }}
commit-message: 'chore: release ${{ steps.bump-type.outputs.next }}'
title: 'chore: release ${{ steps.bump-type.outputs.next }}'
body: 'Release notes: ${{ steps.outline.outputs.document_url }}'
labels: 'changelog:skip'
branch: 'release/next'
draft: true

View File

@@ -563,7 +563,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Install uv
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # v7.1.1
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
# with:

View File

@@ -1,29 +0,0 @@
[tasks.install]
run = "pnpm install --filter @immich/cli --frozen-lockfile"
[tasks.build]
env._.path = "./node_modules/.bin"
run = "vite build"
[tasks.test]
env._.path = "./node_modules/.bin"
run = "vite"
[tasks.lint]
env._.path = "./node_modules/.bin"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."lint-fix"]
run = { task = "lint --fix" }
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."
[tasks.check]
env._.path = "./node_modules/.bin"
run = "tsc --noEmit"

View File

@@ -20,7 +20,7 @@
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.19.0",
"@types/node": "^22.18.13",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",

View File

@@ -1,20 +0,0 @@
[tools]
terragrunt = "0.91.2"
opentofu = "1.10.6"
[tasks."tg:fmt"]
run = "terragrunt hclfmt"
description = "Format terragrunt files"
[tasks.tf]
run = "terragrunt run --all"
description = "Wrapper for terragrunt run-all"
dir = "{{cwd}}"
[tasks."tf:fmt"]
run = "tofu fmt -recursive tf/"
description = "Format terraform files"
[tasks."tf:init"]
run = { task = "tf init -- -reconfigure" }
dir = "{{cwd}}"

View File

@@ -41,7 +41,6 @@ services:
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
- ../plugins:/build/corePlugin
env_file:
- .env
environment:

View File

@@ -10,19 +10,16 @@ Running with a pre-existing Postgres server can unlock powerful administrative f
## Prerequisites
You must install pgvector as it is a prerequisite for VectorChord.
You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).
You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.
:::note Supported versions
Immich is known to work with Postgres versions `>= 14, < 19`.
:::note
Immich is known to work with Postgres versions `>= 14, < 18`.
VectorChord is known to work with pgvector versions `>= 0.7, < 0.9`.
The Immich server will check the VectorChord version on startup to ensure compatibility, and refuse to start if a compatible version is not found.
The current accepted range for VectorChord is `>= 0.3, < 0.6`.
Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.5.0`.
:::
## Specifying the connection URL

View File

@@ -12,13 +12,3 @@ pnpm run migrations:generate <migration-name>
3. Move the migration file to folder `./server/src/schema/migrations` in your code editor.
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately.
## Reverting a Migration
If you need to undo the most recently applied migration—for example, when developing or testing on schema changes—run:
```bash
pnpm run migrations:revert
```
This command rolls back the latest migration and brings the database schema back to its previous state.

View File

@@ -5,7 +5,7 @@ sidebar_position: 2
# Setup
:::note
If there's a feature you're planning to work on, just give us a heads up in [#contributing](https://discord.com/channels/979116623879368755/1071165397228855327) on [our Discord](https://discord.immich.app) so we can:
If there's a feature you're planning to work on, just give us a heads up in [Discord](https://discord.com/channels/979116623879368755/1071165397228855327) so we can:
1. Let you know if it's something we would accept into Immich
2. Provide any guidance on how something like that would ideally be implemented

View File

@@ -149,31 +149,29 @@ Redis (Sentinel) URL example JSON before encoding:
## Machine Learning
| Variable | Description | Default | Containers |
| :---------------------------------------------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__OCR` | Set the maximum number of boxes that will be processed at once by the OCR model | `6` | machine learning |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spun up while inferencing. | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
| `MACHINE_LEARNING_OPENVINO_PRECISION` | If set to FP16, uses half-precision floating-point operations for faster inference with reduced accuracy (one of [`FP16`, `FP32`], applies only to OpenVINO) | `FP32` | machine learning |
| Variable | Description | Default | Containers |
| :---------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS`                             | How many threads of RKNN runtime should be spun up while inferencing.                                | `1`                             | machine learning |
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.

View File

@@ -1,25 +0,0 @@
[tasks.install]
run = "pnpm install --filter documentation --frozen-lockfile"
[tasks.start]
env._.path = "./node_modules/.bin"
run = "docusaurus --port 3005"
[tasks.build]
env._.path = "./node_modules/.bin"
run = [
"jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"docusaurus build",
]
[tasks.preview]
env._.path = "./node_modules/.bin"
run = "docusaurus serve"
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."

View File

@@ -1,29 +0,0 @@
[tasks.install]
run = "pnpm install --filter immich-e2e --frozen-lockfile"
[tasks.test]
env._.path = "./node_modules/.bin"
run = "vitest --run"
[tasks."test-web"]
env._.path = "./node_modules/.bin"
run = "playwright test"
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."
[tasks.lint]
env._.path = "./node_modules/.bin"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."lint-fix"]
run = { task = "lint --fix" }
[tasks.check]
env._.path = "./node_modules/.bin"
run = "tsc --noEmit"

View File

@@ -25,7 +25,7 @@
"@playwright/test": "^1.44.1",
"@socket.io/component-emitter": "^3.1.2",
"@types/luxon": "^3.4.2",
"@types/node": "^22.19.0",
"@types/node": "^22.18.13",
"@types/oidc-provider": "^9.0.0",
"@types/pg": "^8.15.1",
"@types/pngjs": "^6.0.4",

View File

@@ -1,4 +1,4 @@
import { LoginResponseDto, QueueCommand, QueueName, updateConfig } from '@immich/sdk';
import { JobCommand, JobName, LoginResponseDto, updateConfig } from '@immich/sdk';
import { cpSync, rmSync } from 'node:fs';
import { readFile } from 'node:fs/promises';
import { basename } from 'node:path';
@@ -17,28 +17,28 @@ describe('/jobs', () => {
describe('PUT /jobs', () => {
afterEach(async () => {
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: QueueCommand.Resume,
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
command: JobCommand.Resume,
force: false,
});
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: QueueCommand.Resume,
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
command: JobCommand.Resume,
force: false,
});
await utils.queueCommand(admin.accessToken, QueueName.FaceDetection, {
command: QueueCommand.Resume,
await utils.jobCommand(admin.accessToken, JobName.FaceDetection, {
command: JobCommand.Resume,
force: false,
});
await utils.queueCommand(admin.accessToken, QueueName.SmartSearch, {
command: QueueCommand.Resume,
await utils.jobCommand(admin.accessToken, JobName.SmartSearch, {
command: JobCommand.Resume,
force: false,
});
await utils.queueCommand(admin.accessToken, QueueName.DuplicateDetection, {
command: QueueCommand.Resume,
await utils.jobCommand(admin.accessToken, JobName.DuplicateDetection, {
command: JobCommand.Resume,
force: false,
});
@@ -59,8 +59,8 @@ describe('/jobs', () => {
it('should queue metadata extraction for missing assets', async () => {
const path = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: QueueCommand.Pause,
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
command: JobCommand.Pause,
force: false,
});
@@ -77,20 +77,20 @@ describe('/jobs', () => {
expect(asset.exifInfo?.make).toBeNull();
}
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: QueueCommand.Empty,
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
command: JobCommand.Empty,
force: false,
});
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: QueueCommand.Resume,
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
command: JobCommand.Resume,
force: false,
});
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: QueueCommand.Start,
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
command: JobCommand.Start,
force: false,
});
@@ -124,8 +124,8 @@ describe('/jobs', () => {
cpSync(`${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`, path);
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: QueueCommand.Start,
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
command: JobCommand.Start,
force: false,
});
@@ -144,8 +144,8 @@ describe('/jobs', () => {
it('should queue thumbnail extraction for assets missing thumbs', async () => {
const path = `${testAssetDir}/albums/nature/tanners_ridge.jpg`;
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: QueueCommand.Pause,
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
command: JobCommand.Pause,
force: false,
});
@@ -153,32 +153,32 @@ describe('/jobs', () => {
assetData: { bytes: await readFile(path), filename: basename(path) },
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
expect(assetBefore.thumbhash).toBeNull();
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: QueueCommand.Empty,
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
command: JobCommand.Empty,
force: false,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: QueueCommand.Resume,
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
command: JobCommand.Resume,
force: false,
});
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: QueueCommand.Start,
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
command: JobCommand.Start,
force: false,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
expect(assetAfter.thumbhash).not.toBeNull();
@@ -193,26 +193,26 @@ describe('/jobs', () => {
assetData: { bytes: await readFile(path), filename: basename(path) },
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
cpSync(`${testAssetDir}/albums/nature/notocactus_minimus.jpg`, path);
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: QueueCommand.Resume,
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
command: JobCommand.Resume,
force: false,
});
// This runs the missing thumbnail job
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: QueueCommand.Start,
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
command: JobCommand.Start,
force: false,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
const assetAfter = await utils.getAssetInfo(admin.accessToken, id);

View File

@@ -1,6 +1,6 @@
import {
JobName,
LoginResponseDto,
QueueName,
createStack,
deleteUserAdmin,
getMyUser,
@@ -328,7 +328,7 @@ describe('/admin/users', () => {
{ headers: asBearerAuth(user.accessToken) },
);
await utils.waitForQueueFinish(admin.accessToken, QueueName.BackgroundTask);
await utils.waitForQueueFinish(admin.accessToken, JobName.BackgroundTask);
const { status, body } = await request(app)
.delete(`/admin/users/${user.userId}`)

View File

@@ -1,4 +1,5 @@
import {
AllJobStatusResponseDto,
AssetMediaCreateDto,
AssetMediaResponseDto,
AssetResponseDto,
@@ -6,12 +7,11 @@ import {
CheckExistingAssetsDto,
CreateAlbumDto,
CreateLibraryDto,
JobCommandDto,
JobName,
MetadataSearchDto,
Permission,
PersonCreateDto,
QueueCommandDto,
QueueName,
QueuesResponseDto,
SharedLinkCreateDto,
UpdateLibraryDto,
UserAdminCreateDto,
@@ -27,14 +27,14 @@ import {
createStack,
createUserAdmin,
deleteAssets,
getAllJobsStatus,
getAssetInfo,
getConfig,
getConfigDefaults,
getQueuesLegacy,
login,
runQueueCommandLegacy,
scanLibrary,
searchAssets,
sendJobCommand,
setBaseUrl,
signUpAdmin,
tagAssets,
@@ -477,8 +477,8 @@ export const utils = {
tagAssets: (accessToken: string, tagId: string, assetIds: string[]) =>
tagAssets({ id: tagId, bulkIdsDto: { ids: assetIds } }, { headers: asBearerAuth(accessToken) }),
queueCommand: async (accessToken: string, name: QueueName, queueCommandDto: QueueCommandDto) =>
runQueueCommandLegacy({ name, queueCommandDto }, { headers: asBearerAuth(accessToken) }),
jobCommand: async (accessToken: string, jobName: JobName, jobCommandDto: JobCommandDto) =>
sendJobCommand({ id: jobName, jobCommandDto }, { headers: asBearerAuth(accessToken) }),
setAuthCookies: async (context: BrowserContext, accessToken: string, domain = '127.0.0.1') =>
await context.addCookies([
@@ -524,13 +524,13 @@ export const utils = {
await updateConfig({ systemConfigDto: defaultConfig }, { headers: asBearerAuth(accessToken) });
},
isQueueEmpty: async (accessToken: string, queue: keyof QueuesResponseDto) => {
const queues = await getQueuesLegacy({ headers: asBearerAuth(accessToken) });
isQueueEmpty: async (accessToken: string, queue: keyof AllJobStatusResponseDto) => {
const queues = await getAllJobsStatus({ headers: asBearerAuth(accessToken) });
const jobCounts = queues[queue].jobCounts;
return !jobCounts.active && !jobCounts.waiting;
},
waitForQueueFinish: (accessToken: string, queue: keyof QueuesResponseDto, ms?: number) => {
waitForQueueFinish: (accessToken: string, queue: keyof AllJobStatusResponseDto, ms?: number) => {
// eslint-disable-next-line no-async-promise-executor
return new Promise<void>(async (resolve, reject) => {
const timeout = setTimeout(() => reject(new Error('Timed out waiting for queue to empty')), ms || 10_000);

View File

@@ -52,7 +52,7 @@ test.describe('User Administration', () => {
await page.goto(`/admin/users/${user.userId}`);
await page.getByRole('button', { name: 'Edit' }).click();
await page.getByRole('button', { name: 'Edit user' }).click();
await expect(page.getByLabel('Admin User')).not.toBeChecked();
await page.getByText('Admin User').click();
await expect(page.getByLabel('Admin User')).toBeChecked();
@@ -77,7 +77,7 @@ test.describe('User Administration', () => {
await page.goto(`/admin/users/${user.userId}`);
await page.getByRole('button', { name: 'Edit' }).click();
await page.getByRole('button', { name: 'Edit user' }).click();
await expect(page.getByLabel('Admin User')).toBeChecked();
await page.getByText('Admin User').click();
await expect(page.getByLabel('Admin User')).not.toBeChecked();

View File

@@ -32,7 +32,6 @@
"add_to_album_toggle": "Toggle selection for {album}",
"add_to_albums": "Add to albums",
"add_to_albums_count": "Add to albums ({count})",
"add_to_bottom_bar": "Add to",
"add_to_shared_album": "Add to shared album",
"add_upload_to_stack": "Add upload to stack",
"add_url": "Add URL",
@@ -431,7 +430,6 @@
"age_months": "Age {months, plural, one {# month} other {# months}}",
"age_year_months": "Age 1 year, {months, plural, one {# month} other {# months}}",
"age_years": "{years, plural, other {Age #}}",
"album": "Album",
"album_added": "Album added",
"album_added_notification_setting_description": "Receive an email notification when you are added to a shared album",
"album_cover_updated": "Album cover updated",
@@ -477,7 +475,6 @@
"allow_edits": "Allow edits",
"allow_public_user_to_download": "Allow public user to download",
"allow_public_user_to_upload": "Allow public user to upload",
"allowed": "Allowed",
"alt_text_qr_code": "QR code image",
"anti_clockwise": "Anti-clockwise",
"api_key": "API Key",
@@ -1199,8 +1196,6 @@
"import_path": "Import path",
"in_albums": "In {count, plural, one {# album} other {# albums}}",
"in_archive": "In archive",
"in_year": "In {year}",
"in_year_selector": "In",
"include_archived": "Include archived",
"include_shared_albums": "Include shared albums",
"include_shared_partner_assets": "Include shared partner assets",
@@ -1237,7 +1232,6 @@
"language_setting_description": "Select your preferred language",
"large_files": "Large Files",
"last": "Last",
"last_months": "{count, plural, one {Last month} other {Last # months}}",
"last_seen": "Last seen",
"latest_version": "Latest Version",
"latitude": "Latitude",
@@ -1320,10 +1314,6 @@
"main_menu": "Main menu",
"make": "Make",
"manage_geolocation": "Manage location",
"manage_media_access_rationale": "This permission is required for proper handling of moving assets to the trash and restoring them from it.",
"manage_media_access_settings": "Open settings",
"manage_media_access_subtitle": "Allow the Immich app to manage and move media files.",
"manage_media_access_title": "Media Management Access",
"manage_shared_links": "Manage shared links",
"manage_sharing_with_partners": "Manage sharing with partners",
"manage_the_app_settings": "Manage the app settings",
@@ -1387,7 +1377,6 @@
"more": "More",
"move": "Move",
"move_off_locked_folder": "Move out of locked folder",
"move_to": "Move to",
"move_to_lock_folder_action_prompt": "{count} added to the locked folder",
"move_to_locked_folder": "Move to locked folder",
"move_to_locked_folder_confirmation": "These photos and video will be removed from all albums, and only viewable from the locked folder",
@@ -1417,7 +1406,6 @@
"new_pin_code": "New PIN code",
"new_pin_code_subtitle": "This is your first time accessing the locked folder. Create a PIN code to securely access this page",
"new_timeline": "New Timeline",
"new_update": "New update",
"new_user_created": "New user created",
"new_version_available": "NEW VERSION AVAILABLE",
"newest_first": "Newest first",
@@ -1433,7 +1421,6 @@
"no_cast_devices_found": "No cast devices found",
"no_checksum_local": "No checksum available - cannot fetch local assets",
"no_checksum_remote": "No checksum available - cannot fetch remote asset",
"no_devices": "No authorized devices",
"no_duplicates_found": "No duplicates were found.",
"no_exif_info_available": "No exif info available",
"no_explore_results_message": "Upload more photos to explore your collection.",
@@ -1450,7 +1437,6 @@
"no_results_description": "Try a synonym or more general keyword",
"no_shared_albums_message": "Create an album to share photos and videos with people in your network",
"no_uploads_in_progress": "No uploads in progress",
"not_allowed": "Not allowed",
"not_available": "N/A",
"not_in_any_album": "Not in any album",
"not_selected": "Not selected",
@@ -1561,8 +1547,6 @@
"photos_count": "{count, plural, one {{count, number} Photo} other {{count, number} Photos}}",
"photos_from_previous_years": "Photos from previous years",
"pick_a_location": "Pick a location",
"pick_custom_range": "Custom range",
"pick_date_range": "Select a date range",
"pin_code_changed_successfully": "Successfully changed PIN code",
"pin_code_reset_successfully": "Successfully reset PIN code",
"pin_code_setup_successfully": "Successfully setup a PIN code",
@@ -2043,7 +2027,6 @@
"third_party_resources": "Third-Party Resources",
"time": "Time",
"time_based_memories": "Time-based memories",
"time_based_memories_duration": "Number of seconds to display each image.",
"timeline": "Timeline",
"timezone": "Timezone",
"to_archive": "Archive",
@@ -2184,7 +2167,6 @@
"welcome": "Welcome",
"welcome_to_immich": "Welcome to Immich",
"wifi_name": "Wi-Fi Name",
"workflow": "Workflow",
"wrong_pin_code": "Wrong PIN code",
"year": "Year",
"years_ago": "{years, plural, one {# year} other {# years}} ago",

View File

@@ -13,8 +13,6 @@ from rich.logging import RichHandler
from uvicorn import Server
from uvicorn.workers import UvicornWorker
from .schemas import ModelPrecision
class ClipSettings(BaseModel):
textual: str | None = None
@@ -26,11 +24,6 @@ class FacialRecognitionSettings(BaseModel):
detection: str | None = None
class OcrSettings(BaseModel):
recognition: str | None = None
detection: str | None = None
class PreloadModelData(BaseModel):
clip_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__CLIP", None)
facial_recognition_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION", None)
@@ -44,7 +37,6 @@ class PreloadModelData(BaseModel):
del os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION"]
clip: ClipSettings = ClipSettings()
facial_recognition: FacialRecognitionSettings = FacialRecognitionSettings()
ocr: OcrSettings = OcrSettings()
class MaxBatchSize(BaseModel):
@@ -78,7 +70,6 @@ class Settings(BaseSettings):
rknn_threads: int = 1
preload: PreloadModelData | None = None
max_batch_size: MaxBatchSize | None = None
openvino_precision: ModelPrecision = ModelPrecision.FP32
@property
def device_id(self) -> str:

View File

@@ -103,20 +103,6 @@ async def preload_models(preload: PreloadModelData) -> None:
ModelTask.FACIAL_RECOGNITION,
)
if preload.ocr.detection is not None:
await load_models(
preload.ocr.detection,
ModelType.DETECTION,
ModelTask.OCR,
)
if preload.ocr.recognition is not None:
await load_models(
preload.ocr.recognition,
ModelType.RECOGNITION,
ModelTask.OCR,
)
if preload.clip_fallback is not None:
log.warning(
"Deprecated env variable: 'MACHINE_LEARNING_PRELOAD__CLIP'. "

View File

@@ -78,14 +78,6 @@ _INSIGHTFACE_MODELS = {
_PADDLE_MODELS = {
"PP-OCRv5_server",
"PP-OCRv5_mobile",
"CH__PP-OCRv5_server",
"CH__PP-OCRv5_mobile",
"EL__PP-OCRv5_mobile",
"EN__PP-OCRv5_mobile",
"ESLAV__PP-OCRv5_mobile",
"KOREAN__PP-OCRv5_mobile",
"LATIN__PP-OCRv5_mobile",
"TH__PP-OCRv5_mobile",
}
SUPPORTED_PROVIDERS = [

View File

@@ -6,7 +6,7 @@ from numpy.typing import NDArray
from PIL import Image
from rapidocr.ch_ppocr_det.utils import DBPostProcess
from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils.download_file import DownloadFile, DownloadFileInput
from rapidocr.utils import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType
from rapidocr.utils.typings import ModelType as RapidModelType
@@ -23,7 +23,7 @@ class TextDetector(InferenceModel):
identity = (ModelType.DETECTION, ModelTask.OCR)
def __init__(self, model_name: str, **model_kwargs: Any) -> None:
super().__init__(model_name.split("__")[-1], **model_kwargs, model_format=ModelFormat.ONNX)
super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)
self.max_resolution = 736
self.mean = np.array([0.5, 0.5, 0.5], dtype=np.float32)
self.std_inv = np.float32(1.0) / (np.array([0.5, 0.5, 0.5], dtype=np.float32) * 255.0)

View File

@@ -6,7 +6,7 @@ from PIL import Image
from rapidocr.ch_ppocr_rec import TextRecInput
from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer
from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils.download_file import DownloadFile, DownloadFileInput
from rapidocr.utils import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangRec, OCRVersion, TaskType
from rapidocr.utils.typings import ModelType as RapidModelType
from rapidocr.utils.vis_res import VisRes
@@ -25,7 +25,6 @@ class TextRecognizer(InferenceModel):
identity = (ModelType.RECOGNITION, ModelTask.OCR)
def __init__(self, model_name: str, **model_kwargs: Any) -> None:
self.language = LangRec[model_name.split("__")[0]] if "__" in model_name else LangRec.CH
self.min_score = model_kwargs.get("minScore", 0.9)
self._empty: TextRecognitionOutput = {
"box": np.empty(0, dtype=np.float32),
@@ -42,7 +41,7 @@ class TextRecognizer(InferenceModel):
engine_type=EngineType.ONNXRUNTIME,
ocr_version=OCRVersion.PPOCRV5,
task_type=TaskType.REC,
lang_type=self.language,
lang_type=LangRec.CH,
model_type=RapidModelType.MOBILE if "mobile" in self.model_name else RapidModelType.SERVER,
)
)
@@ -62,7 +61,6 @@ class TextRecognizer(InferenceModel):
session=session.session,
rec_batch_num=settings.max_batch_size.text_recognition if settings.max_batch_size is not None else 6,
rec_img_shape=(3, 48, 320),
lang_type=self.language,
)
)
return session

View File

@@ -20,8 +20,8 @@ class TextRecognitionOutput(TypedDict):
# RapidOCR expects `engine_type`, `lang_type`, and `font_path` to be attributes
class OcrOptions(dict[str, Any]):
def __init__(self, lang_type: LangRec | None = None, **options: Any) -> None:
def __init__(self, **options: Any) -> None:
super().__init__(**options)
self.engine_type = EngineType.ONNXRUNTIME
self.lang_type = lang_type
self.lang_type = LangRec.CH
self.font_path = None

View File

@@ -46,11 +46,6 @@ class ModelSource(StrEnum):
PADDLE = "paddle"
class ModelPrecision(StrEnum):
FP16 = "FP16"
FP32 = "FP32"
ModelIdentity = tuple[ModelType, ModelTask]

View File

@@ -93,12 +93,10 @@ class OrtSession:
case "CUDAExecutionProvider" | "ROCMExecutionProvider":
options = {"arena_extend_strategy": "kSameAsRequested", "device_id": settings.device_id}
case "OpenVINOExecutionProvider":
openvino_dir = self.model_path.parent / "openvino"
device = f"GPU.{settings.device_id}"
options = {
"device_type": device,
"precision": settings.openvino_precision.value,
"cache_dir": openvino_dir.as_posix(),
"device_type": f"GPU.{settings.device_id}",
"precision": "FP32",
"cache_dir": (self.model_path.parent / "openvino").as_posix(),
}
case "CoreMLExecutionProvider":
options = {

View File

@@ -26,7 +26,7 @@ from immich_ml.models.clip.textual import MClipTextualEncoder, OpenClipTextualEn
from immich_ml.models.clip.visual import OpenClipVisualEncoder
from immich_ml.models.facial_recognition.detection import FaceDetector
from immich_ml.models.facial_recognition.recognition import FaceRecognizer
from immich_ml.schemas import ModelFormat, ModelPrecision, ModelTask, ModelType
from immich_ml.schemas import ModelFormat, ModelTask, ModelType
from immich_ml.sessions.ann import AnnSession
from immich_ml.sessions.ort import OrtSession
from immich_ml.sessions.rknn import RknnSession, run_inference
@@ -240,16 +240,11 @@ class TestOrtSession:
@pytest.mark.ov_device_ids(["GPU.0", "CPU"])
def test_sets_default_provider_options(self, ov_device_ids: list[str]) -> None:
model_path = "/cache/ViT-B-32__openai/textual/model.onnx"
model_path = "/cache/ViT-B-32__openai/model.onnx"
session = OrtSession(model_path, providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"])
assert session.provider_options == [
{
"device_type": "GPU.0",
"precision": "FP32",
"cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
},
{"device_type": "GPU.0", "precision": "FP32", "cache_dir": "/cache/ViT-B-32__openai/openvino"},
{"arena_extend_strategy": "kSameAsRequested"},
]
@@ -267,21 +262,6 @@ class TestOrtSession:
}
]
def test_sets_openvino_to_fp16_if_enabled(self, mocker: MockerFixture) -> None:
model_path = "/cache/ViT-B-32__openai/textual/model.onnx"
os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"
mocker.patch.object(settings, "openvino_precision", ModelPrecision.FP16)
session = OrtSession(model_path, providers=["OpenVINOExecutionProvider"])
assert session.provider_options == [
{
"device_type": "GPU.1",
"precision": "FP16",
"cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
}
]
def test_sets_provider_options_for_cuda(self) -> None:
os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"
@@ -437,7 +417,7 @@ class TestRknnSession:
session.run(None, input_feed)
rknn_session.return_value.put.assert_called_once_with([input1, input2])
assert np_spy.call_count == 2
np_spy.call_count == 2
np_spy.assert_has_calls([mock.call(input1), mock.call(input2)])
@@ -945,34 +925,11 @@ class TestCache:
any_order=True,
)
async def test_preloads_ocr_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"
settings = Settings()
assert settings.preload is not None
assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"
model_cache = ModelCache()
monkeypatch.setattr("immich_ml.main.model_cache", model_cache)
await preload_models(settings.preload)
mock_get_model.assert_has_calls(
[
mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
],
any_order=True,
)
async def test_preloads_all_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
os.environ["MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL"] = "ViT-B-32__openai"
os.environ["MACHINE_LEARNING_PRELOAD__CLIP__VISUAL"] = "ViT-B-32__openai"
os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION"] = "buffalo_s"
os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION"] = "buffalo_s"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"
settings = Settings()
assert settings.preload is not None
@@ -980,8 +937,6 @@ class TestCache:
assert settings.preload.clip.textual == "ViT-B-32__openai"
assert settings.preload.facial_recognition.recognition == "buffalo_s"
assert settings.preload.facial_recognition.detection == "buffalo_s"
assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"
model_cache = ModelCache()
monkeypatch.setattr("immich_ml.main.model_cache", model_cache)
@@ -993,8 +948,6 @@ class TestCache:
mock.call("ViT-B-32__openai", ModelType.VISUAL, ModelTask.SEARCH),
mock.call("buffalo_s", ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION),
mock.call("buffalo_s", ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION),
mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
],
any_order=True,
)

View File

@@ -3,12 +3,12 @@
#
# Pump one or both of the server/mobile versions in appropriate files
#
# usage: './scripts/pump-version.sh -s <major|minor|patch> <-m> <true|false>
# usage: './scripts/pump-version.sh -s <major|minor|patch> <-m>
#
# examples:
# ./scripts/pump-version.sh -s major # 1.0.0+50 => 2.0.0+50
# ./scripts/pump-version.sh -s minor -m true # 1.0.0+50 => 1.1.0+51
# ./scripts/pump-version.sh -m true # 1.0.0+50 => 1.0.0+51
# ./scripts/pump-version.sh -s major # 1.0.0+50 => 2.0.0+50
# ./scripts/pump-version.sh -s minor -m # 1.0.0+50 => 1.1.0+51
# ./scripts/pump-version.sh -m # 1.0.0+50 => 1.0.0+51
#
SERVER_PUMP="false"

515
mise.toml
View File

@@ -1,9 +1,7 @@
experimental_monorepo_root = true
[tools]
node = "24.11.0"
flutter = "3.35.7"
pnpm = "10.20.0"
pnpm = "10.19.0"
terragrunt = "0.91.2"
opentofu = "1.10.6"
@@ -16,21 +14,514 @@ postinstall = "chmod +x $MISE_TOOL_INSTALL_PATH/dcm"
experimental = true
pin = true
# SDK tasks
# .github
[tasks."github:install"]
run = "pnpm install --filter github --frozen-lockfile"
[tasks."github:format"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --check ."
[tasks."github:format-fix"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --write ."
# @immich/cli
[tasks."cli:install"]
run = "pnpm install --filter @immich/cli --frozen-lockfile"
[tasks."cli:build"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite build"
[tasks."cli:test"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite"
[tasks."cli:lint"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."cli:lint-fix"]
run = "mise run cli:lint --fix"
[tasks."cli:format"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --check ."
[tasks."cli:format-fix"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --write ."
[tasks."cli:check"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "tsc --noEmit"
# @immich/sdk
[tasks."sdk:install"]
dir = "open-api/typescript-sdk"
run = "pnpm install --filter @immich/sdk --frozen-lockfile"
[tasks."sdk:build"]
dir = "open-api/typescript-sdk"
env._.path = "./node_modules/.bin"
env._.path = "./open-api/typescript-sdk/node_modules/.bin"
dir = "./open-api/typescript-sdk"
run = "tsc"
# i18n tasks
# docs
[tasks."docs:install"]
run = "pnpm install --filter documentation --frozen-lockfile"
[tasks."docs:start"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus --port 3005"
[tasks."docs:build"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = [
"jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"docusaurus build",
]
[tasks."docs:preview"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus serve"
[tasks."docs:format"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --check ."
[tasks."docs:format-fix"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --write ."
# e2e
[tasks."e2e:install"]
run = "pnpm install --filter immich-e2e --frozen-lockfile"
[tasks."e2e:test"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "vitest --run"
[tasks."e2e:test-web"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "playwright test"
[tasks."e2e:format"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --check ."
[tasks."e2e:format-fix"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --write ."
[tasks."e2e:lint"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."e2e:lint-fix"]
run = "mise run e2e:lint --fix"
[tasks."e2e:check"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "tsc --noEmit"
# i18n
[tasks."i18n:format"]
dir = "i18n"
run = { task = ":i18n:format-fix" }
run = "mise run i18n:format-fix"
[tasks."i18n:format-fix"]
dir = "i18n"
run = "pnpm dlx sort-json *.json"
run = "pnpm dlx sort-json ./i18n/*.json"
# server
[tasks."server:install"]
run = "pnpm install --filter immich --frozen-lockfile"
[tasks."server:build"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "nest build"
[tasks."server:test"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.mjs"
[tasks."server:test-medium"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.medium.mjs"
[tasks."server:format"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --check ."
[tasks."server:format-fix"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --write ."
[tasks."server:lint"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "eslint \"src/**/*.ts\" \"test/**/*.ts\" --max-warnings 0"
[tasks."server:lint-fix"]
run = "mise run server:lint --fix"
[tasks."server:check"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "tsc --noEmit"
[tasks."server:sql"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"
[tasks."server:open-api"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"
[tasks."server:migrations"]
dir = "server"
run = "node ./dist/bin/migrations.js"
description = "Run database migration commands (create, generate, run, debug, or query)"
[tasks."server:schema-drop"]
run = "mise run server:migrations query 'DROP schema public cascade; CREATE schema public;'"
[tasks."server:schema-reset"]
run = "mise run server:schema-drop && mise run server:migrations run"
[tasks."server:email-dev"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "email dev -p 3050 --dir src/emails"
[tasks."server:checklist"]
run = [
"mise run server:install",
"mise run server:format",
"mise run server:lint",
"mise run server:check",
"mise run server:test-medium --run",
"mise run server:test --run",
]
# web
[tasks."web:install"]
run = "pnpm install --filter immich-web --frozen-lockfile"
[tasks."web:svelte-kit-sync"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "svelte-kit sync"
[tasks."web:build"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"
[tasks."web:build-stats"]
env.BUILD_STATS = "true"
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"
[tasks."web:preview"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite preview"
[tasks."web:start"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vite dev --host 0.0.0.0 --port 3000"
[tasks."web:test"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vitest"
[tasks."web:format"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --check ."
[tasks."web:format-fix"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --write ."
[tasks."web:lint"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "eslint . --max-warnings 0 --concurrency 4"
[tasks."web:lint-fix"]
run = "mise run web:lint --fix"
[tasks."web:check"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "tsc --noEmit"
[tasks."web:check-svelte"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "svelte-check --no-tsconfig --fail-on-warnings"
[tasks."web:checklist"]
run = [
"mise run web:install",
"mise run web:format",
"mise run web:check",
"mise run web:test --run",
"mise run web:lint",
]
# mobile
[tasks."mobile:codegen:dart"]
alias = "mobile:codegen"
description = "Execute build_runner to auto-generate dart code"
dir = "mobile"
sources = [
"pubspec.yaml",
"build.yaml",
"lib/**/*.dart",
"infrastructure/**/*.drift",
]
outputs = { auto = true }
run = "dart run build_runner build --delete-conflicting-outputs"
[tasks."mobile:codegen:pigeon"]
alias = "mobile:pigeon"
description = "Generate pigeon platform code"
dir = "mobile"
depends = [
"mobile:pigeon:native-sync",
"mobile:pigeon:thumbnail",
"mobile:pigeon:background-worker",
"mobile:pigeon:background-worker-lock",
"mobile:pigeon:connectivity",
]
[tasks."mobile:codegen:translation"]
alias = "mobile:translation"
description = "Generate translations from i18n JSONs"
dir = "mobile"
run = [
{ task = "i18n:format-fix" },
{ tasks = [
"mobile:i18n:loader",
"mobile:i18n:keys",
] },
]
[tasks."mobile:codegen:app-icon"]
description = "Generate app icons"
dir = "mobile"
run = "flutter pub run flutter_launcher_icons:main"
[tasks."mobile:codegen:splash"]
description = "Generate splash screen"
dir = "mobile"
run = "flutter pub run flutter_native_splash:create"
[tasks."mobile:test"]
description = "Run mobile tests"
dir = "mobile"
run = "flutter test"
[tasks."mobile:lint"]
description = "Analyze Dart code"
dir = "mobile"
depends = ["mobile:analyze:dart", "mobile:analyze:dcm"]
[tasks."mobile:lint-fix"]
description = "Auto-fix Dart code"
dir = "mobile"
depends = ["mobile:analyze:fix:dart", "mobile:analyze:fix:dcm"]
[tasks."mobile:format"]
description = "Format Dart code"
dir = "mobile"
run = "dart format --set-exit-if-changed $(find lib -name '*.dart' -not \\( -name '*.g.dart' -o -name '*.drift.dart' -o -name '*.gr.dart' \\))"
[tasks."mobile:build:android"]
description = "Build Android release"
dir = "mobile"
run = "flutter build appbundle"
[tasks."mobile:drift:migration"]
alias = "mobile:migration"
description = "Generate database migrations"
dir = "mobile"
run = "dart run drift_dev make-migrations"
# mobile internal tasks
[tasks."mobile:pigeon:native-sync"]
description = "Generate native sync API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/native_sync_api.dart"]
outputs = [
"lib/platform/native_sync_api.g.dart",
"ios/Runner/Sync/Messages.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/sync/Messages.g.kt",
]
run = [
"dart run pigeon --input pigeon/native_sync_api.dart",
"dart format lib/platform/native_sync_api.g.dart",
]
[tasks."mobile:pigeon:thumbnail"]
description = "Generate thumbnail API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/thumbnail_api.dart"]
outputs = [
"lib/platform/thumbnail_api.g.dart",
"ios/Runner/Images/Thumbnails.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/images/Thumbnails.g.kt",
]
run = [
"dart run pigeon --input pigeon/thumbnail_api.dart",
"dart format lib/platform/thumbnail_api.g.dart",
]
[tasks."mobile:pigeon:background-worker"]
description = "Generate background worker API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/background_worker_api.dart"]
outputs = [
"lib/platform/background_worker_api.g.dart",
"ios/Runner/Background/BackgroundWorker.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorker.g.kt",
]
run = [
"dart run pigeon --input pigeon/background_worker_api.dart",
"dart format lib/platform/background_worker_api.g.dart",
]
[tasks."mobile:pigeon:background-worker-lock"]
description = "Generate background worker lock API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/background_worker_lock_api.dart"]
outputs = [
"lib/platform/background_worker_lock_api.g.dart",
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorkerLock.g.kt",
]
run = [
"dart run pigeon --input pigeon/background_worker_lock_api.dart",
"dart format lib/platform/background_worker_lock_api.g.dart",
]
[tasks."mobile:pigeon:connectivity"]
description = "Generate connectivity API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/connectivity_api.dart"]
outputs = [
"lib/platform/connectivity_api.g.dart",
"ios/Runner/Connectivity/Connectivity.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/connectivity/Connectivity.g.kt",
]
run = [
"dart run pigeon --input pigeon/connectivity_api.dart",
"dart format lib/platform/connectivity_api.g.dart",
]
[tasks."mobile:i18n:loader"]
description = "Generate i18n loader"
dir = "mobile"
hide = true
sources = ["i18n/"]
outputs = "lib/generated/codegen_loader.g.dart"
run = [
"dart run easy_localization:generate -S ../i18n",
"dart format lib/generated/codegen_loader.g.dart",
]
[tasks."mobile:i18n:keys"]
description = "Generate i18n keys"
dir = "mobile"
hide = true
sources = ["i18n/en.json"]
outputs = "lib/generated/intl_keys.g.dart"
run = [
"dart run bin/generate_keys.dart",
"dart format lib/generated/intl_keys.g.dart",
]
[tasks."mobile:analyze:dart"]
description = "Run Dart analysis"
dir = "mobile"
hide = true
run = "dart analyze --fatal-infos"
[tasks."mobile:analyze:dcm"]
description = "Run Dart Code Metrics"
dir = "mobile"
hide = true
run = "dcm analyze lib --fatal-style --fatal-warnings"
[tasks."mobile:analyze:fix:dart"]
description = "Auto-fix Dart analysis"
dir = "mobile"
hide = true
run = "dart fix --apply"
[tasks."mobile:analyze:fix:dcm"]
description = "Auto-fix Dart Code Metrics"
dir = "mobile"
hide = true
run = "dcm fix lib"
# docs deployment
[tasks."tg:fmt"]
run = "terragrunt hclfmt"
description = "Format terragrunt files"
[tasks.tf]
run = "terragrunt run --all"
description = "Wrapper for terragrunt run-all"
dir = "{{cwd}}"
[tasks."tf:fmt"]
run = "tofu fmt -recursive tf/"
description = "Format terraform files"
[tasks."tf:init"]
run = "mise run tf init -- -reconfigure"
dir = "{{cwd}}"

View File

@@ -105,7 +105,6 @@ dependencies {
def serialization_version = '1.8.1'
def compose_version = '1.1.1'
def gson_version = '2.10.1'
def room_version = "2.8.3"
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:$kotlin_coroutines_version"
@@ -114,8 +113,6 @@ dependencies {
implementation "com.google.guava:guava:$guava_version"
implementation "com.github.bumptech.glide:glide:$glide_version"
implementation "org.jetbrains.kotlinx:kotlinx-serialization-json:$serialization_version"
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-guava:1.10.2"
implementation "com.squareup.okhttp3:okhttp:5.3.1"
ksp "com.github.bumptech.glide:ksp:$glide_version"
coreLibraryDesugaring 'com.android.tools:desugar_jdk_libs:2.1.2'
@@ -130,10 +127,6 @@ dependencies {
implementation "androidx.compose.ui:ui-tooling:$compose_version"
implementation "androidx.compose.material3:material3:1.2.1"
implementation "androidx.lifecycle:lifecycle-runtime-ktx:2.6.2"
// Room Database
implementation "androidx.room:room-runtime:$room_version"
ksp "androidx.room:room-compiler:$room_version"
}
// This is uncommented in F-Droid build script

View File

@@ -143,7 +143,7 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
val mediaUrls = call.argument<List<String>>("mediaUrls")
if (mediaUrls != null) {
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
moveToTrash(mediaUrls, result)
moveToTrash(mediaUrls, result)
} else {
result.error("PERMISSION_DENIED", "Media permission required", null)
}
@@ -155,23 +155,15 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
"restoreFromTrash" -> {
val fileName = call.argument<String>("fileName")
val type = call.argument<Int>("type")
val mediaId = call.argument<String>("mediaId")
if (fileName != null && type != null) {
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
restoreFromTrash(fileName, type, result)
} else {
result.error("PERMISSION_DENIED", "Media permission required", null)
}
} else
if (mediaId != null && type != null) {
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
restoreFromTrashById(mediaId, type, result)
} else {
result.error("PERMISSION_DENIED", "Media permission required", null)
}
} else {
result.error("INVALID_PARAMS", "Required params are not specified.", null)
}
} else {
result.error("INVALID_NAME", "The file name is not specified.", null)
}
}
"requestManageMediaPermission" -> {
@@ -183,17 +175,6 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
}
}
"hasManageMediaPermission" -> {
if (hasManageMediaPermission()) {
Log.i("Manage storage permission", "Permission already granted")
result.success(true)
} else {
result.success(false)
}
}
"manageMediaPermission" -> requestManageMediaPermission(result)
else -> result.notImplemented()
}
}
@@ -242,48 +223,26 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
uri.let { toggleTrash(listOf(it), false, result) }
}
@RequiresApi(Build.VERSION_CODES.R)
private fun restoreFromTrashById(mediaId: String, type: Int, result: Result) {
val id = mediaId.toLongOrNull()
if (id == null) {
result.error("INVALID_ID", "The file id is not a valid number: $mediaId", null)
return
}
if (!isInTrash(id)) {
result.error("TrashNotFound", "Item with id=$id not found in trash", null)
return
}
val uri = ContentUris.withAppendedId(contentUriForType(type), id)
try {
Log.i(TAG, "restoreFromTrashById: uri=$uri (type=$type,id=$id)")
restoreUris(listOf(uri), result)
} catch (e: Exception) {
Log.w(TAG, "restoreFromTrashById failed", e)
}
}
@RequiresApi(Build.VERSION_CODES.R)
private fun toggleTrash(contentUris: List<Uri>, isTrashed: Boolean, result: Result) {
val activity = activityBinding?.activity
val contentResolver = context?.contentResolver
if (activity == null || contentResolver == null) {
result.error("TrashError", "Activity or ContentResolver not available", null)
return
}
val activity = activityBinding?.activity
val contentResolver = context?.contentResolver
if (activity == null || contentResolver == null) {
result.error("TrashError", "Activity or ContentResolver not available", null)
return
}
try {
val pendingIntent = MediaStore.createTrashRequest(contentResolver, contentUris, isTrashed)
pendingResult = result // Store for onActivityResult
activity.startIntentSenderForResult(
pendingIntent.intentSender,
trashRequestCode,
null, 0, 0, 0
)
} catch (e: Exception) {
Log.e("TrashError", "Error creating or starting trash request", e)
result.error("TrashError", "Error creating or starting trash request", null)
try {
val pendingIntent = MediaStore.createTrashRequest(contentResolver, contentUris, isTrashed)
pendingResult = result // Store for onActivityResult
activity.startIntentSenderForResult(
pendingIntent.intentSender,
trashRequestCode,
null, 0, 0, 0
)
} catch (e: Exception) {
Log.e("TrashError", "Error creating or starting trash request", e)
result.error("TrashError", "Error creating or starting trash request", null)
}
}
@@ -305,7 +264,14 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
contentResolver.query(queryUri, projection, queryArgs, null)?.use { cursor ->
if (cursor.moveToFirst()) {
val id = cursor.getLong(cursor.getColumnIndexOrThrow(MediaStore.Files.FileColumns._ID))
return ContentUris.withAppendedId(contentUriForType(type), id)
// same order as AssetType from dart
val contentUri = when (type) {
1 -> MediaStore.Images.Media.EXTERNAL_CONTENT_URI
2 -> MediaStore.Video.Media.EXTERNAL_CONTENT_URI
3 -> MediaStore.Audio.Media.EXTERNAL_CONTENT_URI
else -> queryUri
}
return ContentUris.withAppendedId(contentUri, id)
}
}
return null
@@ -349,40 +315,6 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
}
return false
}
@RequiresApi(Build.VERSION_CODES.R)
private fun isInTrash(id: Long): Boolean {
val contentResolver = context?.contentResolver ?: return false
val filesUri = MediaStore.Files.getContentUri(MediaStore.VOLUME_EXTERNAL)
val args = Bundle().apply {
putString(ContentResolver.QUERY_ARG_SQL_SELECTION, "${MediaStore.Files.FileColumns._ID}=?")
putStringArray(ContentResolver.QUERY_ARG_SQL_SELECTION_ARGS, arrayOf(id.toString()))
putInt(MediaStore.QUERY_ARG_MATCH_TRASHED, MediaStore.MATCH_ONLY)
putInt(ContentResolver.QUERY_ARG_LIMIT, 1)
}
return contentResolver.query(filesUri, arrayOf(MediaStore.Files.FileColumns._ID), args, null)
?.use { it.moveToFirst() } == true
}
@RequiresApi(Build.VERSION_CODES.R)
private fun restoreUris(uris: List<Uri>, result: Result) {
if (uris.isEmpty()) {
result.error("TrashError", "No URIs to restore", null)
return
}
Log.i(TAG, "restoreUris: count=${uris.size}, first=${uris.first()}")
toggleTrash(uris, false, result)
}
@RequiresApi(Build.VERSION_CODES.Q)
private fun contentUriForType(type: Int): Uri =
when (type) {
// same order as AssetType from dart
1 -> MediaStore.Images.Media.EXTERNAL_CONTENT_URI
2 -> MediaStore.Video.Media.EXTERNAL_CONTENT_URI
3 -> MediaStore.Audio.Media.EXTERNAL_CONTENT_URI
else -> MediaStore.Files.getContentUri(MediaStore.VOLUME_EXTERNAL)
}
}
private const val TAG = "BackgroundServicePlugin"

View File

@@ -7,13 +7,11 @@ import androidx.work.Configuration
import androidx.work.WorkManager
import app.alextran.immich.background.BackgroundEngineLock
import app.alextran.immich.background.BackgroundWorkerApiImpl
import app.alextran.immich.upload.NetworkMonitor
class ImmichApp : Application() {
override fun onCreate() {
super.onCreate()
val config = Configuration.Builder().build()
NetworkMonitor.initialize(this)
WorkManager.initialize(this, config)
// always start BackupWorker after WorkManager init; this fixes the following bug:
// After the process is killed (by user or system), the first trigger (taking a new picture) is lost.

View File

@@ -15,8 +15,6 @@ import app.alextran.immich.images.ThumbnailsImpl
import app.alextran.immich.sync.NativeSyncApi
import app.alextran.immich.sync.NativeSyncApiImpl26
import app.alextran.immich.sync.NativeSyncApiImpl30
import app.alextran.immich.upload.UploadApi
import app.alextran.immich.upload.UploadTaskImpl
import io.flutter.embedding.android.FlutterFragmentActivity
import io.flutter.embedding.engine.FlutterEngine
@@ -41,7 +39,6 @@ class MainActivity : FlutterFragmentActivity() {
ThumbnailApi.setUp(messenger, ThumbnailsImpl(ctx))
BackgroundWorkerFgHostApi.setUp(messenger, BackgroundWorkerApiImpl(ctx))
ConnectivityApi.setUp(messenger, ConnectivityApiImpl(ctx))
UploadApi.setUp(messenger, UploadTaskImpl(ctx))
flutterEngine.plugins.add(BackgroundServicePlugin())
flutterEngine.plugins.add(HttpSSLOptionsPlugin())

View File

@@ -1,114 +0,0 @@
package app.alextran.immich.schema
import androidx.room.TypeConverter
import com.google.gson.Gson
import com.google.gson.reflect.TypeToken
import java.net.URL
import java.util.Date
// Room TypeConverters bridging Kotlin types to SQLite-storable primitives.
//
// Persistence contract visible in this class:
// - Dates are stored as unix SECONDS (not millis) — see fromTimestamp/dateToTimestamp.
// - Most enums are stored by ORDINAL, so enum declaration order is part of the
//   on-disk schema: entries must only ever be appended, never reordered.
// - StoreKey uses its explicit rawValue; SourceType and EndpointStatus use
//   their string `value` instead of the ordinal.
// - Maps and Endpoint lists are stored as Gson JSON strings.
class Converters {
private val gson = Gson()
// unix seconds -> Date
@TypeConverter
fun fromTimestamp(value: Long?): Date? = value?.let { Date(it * 1000) }
// Date -> unix seconds (integer division truncates sub-second precision)
@TypeConverter
fun dateToTimestamp(date: Date?): Long? = date?.let { it.time / 1000 }
@TypeConverter
fun fromUrl(value: String?): URL? = value?.let { URL(it) }
@TypeConverter
fun urlToString(url: URL?): String? = url?.toString()
// StoreKey round-trips through its explicit rawValue (fromInt returns null
// for unknown values rather than throwing).
@TypeConverter
fun fromStoreKey(value: Int?): StoreKey? = value?.let { StoreKey.fromInt(it) }
@TypeConverter
fun storeKeyToInt(storeKey: StoreKey?): Int? = storeKey?.rawValue
// NOTE: entries[it] throws IndexOutOfBoundsException for out-of-range stored
// values (e.g. after a downgrade); same applies to all ordinal converters below.
@TypeConverter
fun fromTaskStatus(value: Int?): TaskStatus? = value?.let { TaskStatus.entries[it] }
@TypeConverter
fun taskStatusToInt(status: TaskStatus?): Int? = status?.ordinal
@TypeConverter
fun fromBackupSelection(value: Int?): BackupSelection? = value?.let { BackupSelection.entries[it] }
@TypeConverter
fun backupSelectionToInt(selection: BackupSelection?): Int? = selection?.ordinal
@TypeConverter
fun fromAvatarColor(value: Int?): AvatarColor? = value?.let { AvatarColor.entries[it] }
@TypeConverter
fun avatarColorToInt(color: AvatarColor?): Int? = color?.ordinal
@TypeConverter
fun fromAlbumUserRole(value: Int?): AlbumUserRole? = value?.let { AlbumUserRole.entries[it] }
@TypeConverter
fun albumUserRoleToInt(role: AlbumUserRole?): Int? = role?.ordinal
@TypeConverter
fun fromMemoryType(value: Int?): MemoryType? = value?.let { MemoryType.entries[it] }
@TypeConverter
fun memoryTypeToInt(type: MemoryType?): Int? = type?.ordinal
@TypeConverter
fun fromAssetVisibility(value: Int?): AssetVisibility? = value?.let { AssetVisibility.entries[it] }
@TypeConverter
fun assetVisibilityToInt(visibility: AssetVisibility?): Int? = visibility?.ordinal
// SourceType is stored by its string value, not ordinal.
@TypeConverter
fun fromSourceType(value: String?): SourceType? = value?.let { SourceType.fromString(it) }
@TypeConverter
fun sourceTypeToString(type: SourceType?): String? = type?.value
@TypeConverter
fun fromUploadMethod(value: Int?): UploadMethod? = value?.let { UploadMethod.entries[it] }
@TypeConverter
fun uploadMethodToInt(method: UploadMethod?): Int? = method?.ordinal
@TypeConverter
fun fromUploadErrorCode(value: Int?): UploadErrorCode? = value?.let { UploadErrorCode.entries[it] }
@TypeConverter
fun uploadErrorCodeToInt(code: UploadErrorCode?): Int? = code?.ordinal
@TypeConverter
fun fromAssetType(value: Int?): AssetType? = value?.let { AssetType.entries[it] }
@TypeConverter
fun assetTypeToInt(type: AssetType?): Int? = type?.ordinal
// Maps are stored as JSON objects via Gson.
@TypeConverter
fun fromStringMap(value: String?): Map<String, String>? {
val type = object : TypeToken<Map<String, String>>() {}.type
return gson.fromJson(value, type)
}
@TypeConverter
fun stringMapToString(map: Map<String, String>?): String? = gson.toJson(map)
// EndpointStatus is stored by its string value, not ordinal.
@TypeConverter
fun fromEndpointStatus(value: String?): EndpointStatus? = value?.let { EndpointStatus.fromString(it) }
@TypeConverter
fun endpointStatusToString(status: EndpointStatus?): String? = status?.value
@TypeConverter
fun fromEndpointList(value: String?): List<Endpoint>? {
val type = object : TypeToken<List<Endpoint>>() {}.type
return gson.fromJson(value, type)
}
@TypeConverter
fun endpointListToString(list: List<Endpoint>?): String? = gson.toJson(list)
}

View File

@@ -1,59 +0,0 @@
package app.alextran.immich.schema
import android.content.Context
import androidx.room.Database
import androidx.room.Room
import androidx.room.RoomDatabase
import androidx.room.TypeConverters
// Room database definition for the native-side mirror of the app schema.
//
// version = 1 with exportSchema = false: no schema JSON is exported, so no
// automatic migrations can be derived later.
// NOTE(review): no migrations or fallbackToDestructiveMigration are configured;
// any future version bump will crash at open time until one is added — confirm
// this is intentional for v1.
@Database(
entities = [
AssetFace::class,
AuthUser::class,
LocalAlbum::class,
LocalAlbumAsset::class,
LocalAsset::class,
MemoryAsset::class,
Memory::class,
Partner::class,
Person::class,
RemoteAlbum::class,
RemoteAlbumAsset::class,
RemoteAlbumUser::class,
RemoteAsset::class,
RemoteExif::class,
Stack::class,
Store::class,
UploadTask::class,
UploadTaskStat::class,
User::class,
UserMetadata::class
],
version = 1,
exportSchema = false
)
@TypeConverters(Converters::class)
abstract class AppDatabase : RoomDatabase() {
abstract fun localAssetDao(): LocalAssetDao
abstract fun storeDao(): StoreDao
abstract fun uploadTaskDao(): UploadTaskDao
abstract fun uploadTaskStatDao(): UploadTaskStatDao
companion object {
// Process-wide singleton, created lazily with double-checked locking
// (@Volatile read outside the synchronized block, build inside it).
@Volatile
private var INSTANCE: AppDatabase? = null
// Returns the shared database instance, building it on first access.
// Uses applicationContext to avoid leaking an Activity context.
fun getDatabase(context: Context): AppDatabase {
return INSTANCE ?: synchronized(this) {
val instance = Room.databaseBuilder(
context.applicationContext,
AppDatabase::class.java,
"app_database"
).build()
INSTANCE = instance
instance
}
}
}
}

View File

@@ -1,267 +0,0 @@
package app.alextran.immich.schema
import java.net.URL
import java.util.Date
// Keys of the persisted key/value store (store_entity).
//
// Each key carries an explicit rawValue that is written to the database (see
// Converters.storeKeyToInt), so rawValues are part of the on-disk schema and
// must never be changed or reused. The numbering is sparse and intentionally
// non-sequential.
// NOTE(review): this table presumably mirrors the Dart-side StoreKey
// numbering — confirm the two stay in sync when keys are added.
enum class StoreKey(val rawValue: Int) {
VERSION(0),
DEVICE_ID_HASH(3),
BACKUP_TRIGGER_DELAY(8),
TILES_PER_ROW(103),
GROUP_ASSETS_BY(105),
UPLOAD_ERROR_NOTIFICATION_GRACE_PERIOD(106),
THUMBNAIL_CACHE_SIZE(110),
IMAGE_CACHE_SIZE(111),
ALBUM_THUMBNAIL_CACHE_SIZE(112),
SELECTED_ALBUM_SORT_ORDER(113),
LOG_LEVEL(115),
MAP_RELATIVE_DATE(119),
MAP_THEME_MODE(124),
ASSET_ETAG(1),
CURRENT_USER(2),
DEVICE_ID(4),
ACCESS_TOKEN(11),
SERVER_ENDPOINT(12),
SSL_CLIENT_CERT_DATA(15),
SSL_CLIENT_PASSWD(16),
THEME_MODE(102),
CUSTOM_HEADERS(127),
PRIMARY_COLOR(128),
PREFERRED_WIFI_NAME(133),
EXTERNAL_ENDPOINT_LIST(135),
LOCAL_ENDPOINT(134),
SERVER_URL(10),
BACKUP_FAILED_SINCE(5),
BACKUP_REQUIRE_WIFI(6),
BACKUP_REQUIRE_CHARGING(7),
AUTO_BACKUP(13),
BACKGROUND_BACKUP(14),
LOAD_PREVIEW(100),
LOAD_ORIGINAL(101),
DYNAMIC_LAYOUT(104),
BACKGROUND_BACKUP_TOTAL_PROGRESS(107),
BACKGROUND_BACKUP_SINGLE_PROGRESS(108),
STORAGE_INDICATOR(109),
ADVANCED_TROUBLESHOOTING(114),
PREFER_REMOTE_IMAGE(116),
LOOP_VIDEO(117),
MAP_SHOW_FAVORITE_ONLY(118),
SELF_SIGNED_CERT(120),
MAP_INCLUDE_ARCHIVED(121),
IGNORE_ICLOUD_ASSETS(122),
SELECTED_ALBUM_SORT_REVERSE(123),
MAP_WITH_PARTNERS(125),
ENABLE_HAPTIC_FEEDBACK(126),
DYNAMIC_THEME(129),
COLORFUL_INTERFACE(130),
SYNC_ALBUMS(131),
AUTO_ENDPOINT_SWITCHING(132),
LOAD_ORIGINAL_VIDEO(136),
MANAGE_LOCAL_MEDIA_ANDROID(137),
READONLY_MODE_ENABLED(138),
AUTO_PLAY_VIDEO(139),
PHOTO_MANAGER_CUSTOM_FILTER(1000),
BETA_PROMPT_SHOWN(1001),
BETA_TIMELINE(1002),
ENABLE_BACKUP(1003),
USE_WIFI_FOR_UPLOAD_VIDEOS(1004),
USE_WIFI_FOR_UPLOAD_PHOTOS(1005),
NEED_BETA_MIGRATION(1006),
SHOULD_RESET_SYNC(1007);
companion object {
// Reverse lookup by persisted rawValue; null for unknown values.
fun fromInt(value: Int): StoreKey? = entries.find { it.rawValue == value }
// Pre-built typed handles pairing each key with its value type, for use
// with the type-safe StoreDao.get/set extensions.
// Int keys
val version = TypedStoreKey<Int>(VERSION)
val deviceIdHash = TypedStoreKey<Int>(DEVICE_ID_HASH)
val backupTriggerDelay = TypedStoreKey<Int>(BACKUP_TRIGGER_DELAY)
val tilesPerRow = TypedStoreKey<Int>(TILES_PER_ROW)
val groupAssetsBy = TypedStoreKey<Int>(GROUP_ASSETS_BY)
val uploadErrorNotificationGracePeriod = TypedStoreKey<Int>(UPLOAD_ERROR_NOTIFICATION_GRACE_PERIOD)
val thumbnailCacheSize = TypedStoreKey<Int>(THUMBNAIL_CACHE_SIZE)
val imageCacheSize = TypedStoreKey<Int>(IMAGE_CACHE_SIZE)
val albumThumbnailCacheSize = TypedStoreKey<Int>(ALBUM_THUMBNAIL_CACHE_SIZE)
val selectedAlbumSortOrder = TypedStoreKey<Int>(SELECTED_ALBUM_SORT_ORDER)
val logLevel = TypedStoreKey<Int>(LOG_LEVEL)
val mapRelativeDate = TypedStoreKey<Int>(MAP_RELATIVE_DATE)
val mapThemeMode = TypedStoreKey<Int>(MAP_THEME_MODE)
// String keys
val assetETag = TypedStoreKey<String>(ASSET_ETAG)
val currentUser = TypedStoreKey<String>(CURRENT_USER)
val deviceId = TypedStoreKey<String>(DEVICE_ID)
val accessToken = TypedStoreKey<String>(ACCESS_TOKEN)
val sslClientCertData = TypedStoreKey<String>(SSL_CLIENT_CERT_DATA)
val sslClientPasswd = TypedStoreKey<String>(SSL_CLIENT_PASSWD)
val themeMode = TypedStoreKey<String>(THEME_MODE)
val customHeaders = TypedStoreKey<Map<String, String>>(CUSTOM_HEADERS)
val primaryColor = TypedStoreKey<String>(PRIMARY_COLOR)
val preferredWifiName = TypedStoreKey<String>(PREFERRED_WIFI_NAME)
// Endpoint keys
val externalEndpointList = TypedStoreKey<List<Endpoint>>(EXTERNAL_ENDPOINT_LIST)
// URL keys
val localEndpoint = TypedStoreKey<URL>(LOCAL_ENDPOINT)
val serverEndpoint = TypedStoreKey<URL>(SERVER_ENDPOINT)
val serverUrl = TypedStoreKey<URL>(SERVER_URL)
// Date keys
val backupFailedSince = TypedStoreKey<Date>(BACKUP_FAILED_SINCE)
// Bool keys
val backupRequireWifi = TypedStoreKey<Boolean>(BACKUP_REQUIRE_WIFI)
val backupRequireCharging = TypedStoreKey<Boolean>(BACKUP_REQUIRE_CHARGING)
val autoBackup = TypedStoreKey<Boolean>(AUTO_BACKUP)
val backgroundBackup = TypedStoreKey<Boolean>(BACKGROUND_BACKUP)
val loadPreview = TypedStoreKey<Boolean>(LOAD_PREVIEW)
val loadOriginal = TypedStoreKey<Boolean>(LOAD_ORIGINAL)
val dynamicLayout = TypedStoreKey<Boolean>(DYNAMIC_LAYOUT)
val backgroundBackupTotalProgress = TypedStoreKey<Boolean>(BACKGROUND_BACKUP_TOTAL_PROGRESS)
val backgroundBackupSingleProgress = TypedStoreKey<Boolean>(BACKGROUND_BACKUP_SINGLE_PROGRESS)
val storageIndicator = TypedStoreKey<Boolean>(STORAGE_INDICATOR)
val advancedTroubleshooting = TypedStoreKey<Boolean>(ADVANCED_TROUBLESHOOTING)
val preferRemoteImage = TypedStoreKey<Boolean>(PREFER_REMOTE_IMAGE)
val loopVideo = TypedStoreKey<Boolean>(LOOP_VIDEO)
val mapShowFavoriteOnly = TypedStoreKey<Boolean>(MAP_SHOW_FAVORITE_ONLY)
val selfSignedCert = TypedStoreKey<Boolean>(SELF_SIGNED_CERT)
val mapIncludeArchived = TypedStoreKey<Boolean>(MAP_INCLUDE_ARCHIVED)
val ignoreIcloudAssets = TypedStoreKey<Boolean>(IGNORE_ICLOUD_ASSETS)
val selectedAlbumSortReverse = TypedStoreKey<Boolean>(SELECTED_ALBUM_SORT_REVERSE)
// NOTE(review): naming nit — `mapwithPartners` is missing the camel-case W.
val mapwithPartners = TypedStoreKey<Boolean>(MAP_WITH_PARTNERS)
val enableHapticFeedback = TypedStoreKey<Boolean>(ENABLE_HAPTIC_FEEDBACK)
val dynamicTheme = TypedStoreKey<Boolean>(DYNAMIC_THEME)
val colorfulInterface = TypedStoreKey<Boolean>(COLORFUL_INTERFACE)
val syncAlbums = TypedStoreKey<Boolean>(SYNC_ALBUMS)
val autoEndpointSwitching = TypedStoreKey<Boolean>(AUTO_ENDPOINT_SWITCHING)
val loadOriginalVideo = TypedStoreKey<Boolean>(LOAD_ORIGINAL_VIDEO)
val manageLocalMediaAndroid = TypedStoreKey<Boolean>(MANAGE_LOCAL_MEDIA_ANDROID)
val readonlyModeEnabled = TypedStoreKey<Boolean>(READONLY_MODE_ENABLED)
val autoPlayVideo = TypedStoreKey<Boolean>(AUTO_PLAY_VIDEO)
val photoManagerCustomFilter = TypedStoreKey<Boolean>(PHOTO_MANAGER_CUSTOM_FILTER)
val betaPromptShown = TypedStoreKey<Boolean>(BETA_PROMPT_SHOWN)
val betaTimeline = TypedStoreKey<Boolean>(BETA_TIMELINE)
val enableBackup = TypedStoreKey<Boolean>(ENABLE_BACKUP)
val useWifiForUploadVideos = TypedStoreKey<Boolean>(USE_WIFI_FOR_UPLOAD_VIDEOS)
val useWifiForUploadPhotos = TypedStoreKey<Boolean>(USE_WIFI_FOR_UPLOAD_PHOTOS)
val needBetaMigration = TypedStoreKey<Boolean>(NEED_BETA_MIGRATION)
val shouldResetSync = TypedStoreKey<Boolean>(SHOULD_RESET_SYNC)
}
}
// Lifecycle states of a transfer task. Persisted by ORDINAL (see
// Converters.taskStatusToInt) and referenced as integer literals in
// UploadTaskDao queries (3 = UPLOAD_PENDING) — append-only; never reorder.
enum class TaskStatus {
DOWNLOAD_PENDING,
DOWNLOAD_QUEUED,
DOWNLOAD_FAILED,
UPLOAD_PENDING,
UPLOAD_QUEUED,
UPLOAD_FAILED,
UPLOAD_COMPLETE
}
// Backup opt-in state of a local album. Persisted by ordinal and referenced
// as integer literals in LocalAssetDao (0 = SELECTED, 2 = EXCLUDED).
enum class BackupSelection {
SELECTED,
NONE,
EXCLUDED
}
// Persisted by ordinal.
enum class AvatarColor {
PRIMARY,
PINK,
RED,
YELLOW,
BLUE,
GREEN,
PURPLE,
ORANGE,
GRAY,
AMBER
}
// Persisted by ordinal.
enum class AlbumUserRole {
EDITOR,
VIEWER
}
// Persisted by ordinal.
enum class MemoryType {
ON_THIS_DAY
}
// Persisted by ordinal.
enum class AssetVisibility {
TIMELINE,
HIDDEN,
ARCHIVE,
LOCKED
}
// Persisted by string `value` (see Converters.sourceTypeToString), so the
// strings — not the declaration order — are the schema contract here.
enum class SourceType(val value: String) {
MACHINE_LEARNING("machine-learning"),
EXIF("exif"),
MANUAL("manual");
companion object {
fun fromString(value: String): SourceType? = entries.find { it.value == value }
}
}
// Persisted by ordinal.
enum class UploadMethod {
MULTIPART,
RESUMABLE
}
// Failure reasons recorded in upload_tasks.last_error. Persisted by ordinal —
// append-only; never reorder.
enum class UploadErrorCode {
UNKNOWN,
ASSET_NOT_FOUND,
FILE_NOT_FOUND,
RESOURCE_NOT_FOUND,
INVALID_RESOURCE,
ENCODING_FAILED,
WRITE_FAILED,
NOT_ENOUGH_SPACE,
NETWORK_ERROR,
PHOTOS_INTERNAL_ERROR,
PHOTOS_UNKNOWN_ERROR,
NO_SERVER_URL,
NO_DEVICE_ID,
NO_ACCESS_TOKEN,
INTERRUPTED,
CANCELLED,
DOWNLOAD_STALLED,
FORCE_QUIT,
OUT_OF_RESOURCES,
BACKGROUND_UPDATES_DISABLED,
UPLOAD_TIMEOUT,
ICLOUD_RATE_LIMIT,
ICLOUD_THROTTLED,
INVALID_SERVER_RESPONSE,
}
// Persisted by ordinal.
enum class AssetType {
OTHER,
IMAGE,
VIDEO,
AUDIO
}
// Persisted by string `value` (see Converters.endpointStatusToString).
enum class EndpointStatus(val value: String) {
LOADING("loading"),
VALID("valid"),
ERROR("error"),
UNKNOWN("unknown");
companion object {
fun fromString(value: String): EndpointStatus? = entries.find { it.value == value }
}
}
// Endpoint data class
// A server endpoint URL plus its last known reachability status; serialized
// to JSON for storage (see Converters.endpointListToString).
data class Endpoint(
val url: String,
val status: EndpointStatus
)

View File

@@ -1,168 +0,0 @@
package app.alextran.immich.schema
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import app.alextran.immich.upload.TaskConfig
import java.util.Date
// Read-side queries over locally discovered (on-device) assets.
@Dao
interface LocalAssetDao {
// Returns up to `limit` assets eligible for backup. An asset qualifies when:
//  - it belongs to at least one album marked SELECTED (backup_selection = 0,
//    the BackupSelection.SELECTED ordinal),
//  - it is NOT in any EXCLUDED album (backup_selection = 2),
//  - no remote asset owned by the current user already has its checksum
//    (i.e. it has not been uploaded yet), and
//  - it is not already enqueued in upload_tasks.
// NOTE(review): the subquery reads store_entity id 14 and labels it
// "current_user", but this module's StoreKey.CURRENT_USER rawValue is 2 —
// presumably id 14 matches the store table this schema mirrors; confirm.
@Query("""
SELECT a.id, a.type FROM local_asset_entity a
WHERE EXISTS (
SELECT 1 FROM local_album_asset_entity laa
INNER JOIN local_album_entity la ON laa.album_id = la.id
WHERE laa.asset_id = a.id
AND la.backup_selection = 0 -- selected
)
AND NOT EXISTS (
SELECT 1 FROM local_album_asset_entity laa2
INNER JOIN local_album_entity la2 ON laa2.album_id = la2.id
WHERE laa2.asset_id = a.id
AND la2.backup_selection = 2 -- excluded
)
AND NOT EXISTS (
SELECT 1 FROM remote_asset_entity ra
WHERE ra.checksum = a.checksum
AND ra.owner_id = (SELECT string_value FROM store_entity WHERE id = 14) -- current_user
)
AND NOT EXISTS (
SELECT 1 FROM upload_tasks ut
WHERE ut.local_id = a.id
)
LIMIT :limit
""")
suspend fun getCandidatesForBackup(limit: Int): List<BackupCandidate>
}
// Key/value store access with type-safe helpers layered over two raw Room
// methods. Int-, Bool- and Date-backed keys live in the `int_value` column;
// all other types are serialized into the `string_value` column.
@Dao
interface StoreDao {
  // Fetches the raw row for a key, or null if the key was never persisted.
  @Query("SELECT * FROM store_entity WHERE id = :key")
  suspend fun get(key: StoreKey): Store?

  // Upserts a key/value row (REPLACE on conflict).
  @Insert(onConflict = OnConflictStrategy.REPLACE)
  suspend fun insert(store: Store)

  // Extension functions for type-safe access

  // Reads and decodes the value for [typedKey] via [storage]; null when the
  // key is absent or its backing column is null.
  suspend fun <T> get(
    typedKey: TypedStoreKey<T>,
    storage: StorageType<T>
  ): T? {
    val store = get(typedKey.key) ?: return null
    return when (storage) {
      is StorageType.IntStorage,
      is StorageType.BoolStorage,
      is StorageType.DateStorage -> {
        store.intValue?.let { raw ->
          // DateStorage.fromDb casts its argument to Long (unix seconds),
          // but the column value is an Int — widen it first so the cast
          // cannot throw a ClassCastException at runtime.
          val dbValue: Any = if (storage is StorageType.DateStorage) raw.toLong() else raw
          storage.fromDb(dbValue)
        }
      }
      else -> {
        store.stringValue?.let { storage.fromDb(it) }
      }
    }
  }

  // Encodes [value] via [storage] and upserts it under [typedKey].
  suspend fun <T> set(
    typedKey: TypedStoreKey<T>,
    value: T,
    storage: StorageType<T>
  ) {
    val dbValue = storage.toDb(value)
    val store = when (storage) {
      is StorageType.IntStorage,
      is StorageType.BoolStorage,
      is StorageType.DateStorage -> {
        Store(
          id = typedKey.key,
          stringValue = null,
          // toDb yields Int for int/bool keys but Long (unix seconds) for
          // Date keys; a blind `as Int` throws for Long, so widen through
          // Number instead.
          // NOTE(review): seconds overflow Int in 2038 — consider a Long column.
          intValue = (dbValue as Number).toInt()
        )
      }
      else -> {
        Store(
          id = typedKey.key,
          stringValue = dbValue as String,
          intValue = null
        )
      }
    }
    insert(store)
  }
}
// Queue management for upload tasks. Integer literals in the SQL reference
// TaskStatus ordinals (3 = UPLOAD_PENDING); keep them in sync with the enum.
@Dao
interface UploadTaskDao {
// IGNORE on conflict makes re-enqueueing the same assets idempotent.
@Insert(onConflict = OnConflictStrategy.IGNORE)
suspend fun insertAll(tasks: List<UploadTask>)
// Returns ids of tasks currently in any of the given states.
@Query("""
SELECT id FROM upload_tasks
WHERE status IN (:statuses)
""")
suspend fun getTaskIdsByStatus(statuses: List<Long>)
// Read-only access to aggregate upload statistics.
@Dao
interface UploadTaskStatDao {
// NOTE(review): SELECT * with no LIMIT mapped to a single nullable result —
// presumably upload_task_stats holds exactly one row (or is a single-row
// view) and Room returns the first row; confirm.
@Query("SELECT * FROM upload_task_stats")
suspend fun getStats(): UploadTaskStat?
}

View File

@@ -1,93 +0,0 @@
package app.alextran.immich.schema
import com.google.gson.Gson
import java.net.URL
import java.util.Date
// Sealed interface representing storage types.
//
// Each implementation encodes a strongly-typed value into the primitive
// representation persisted in store_entity (an integer or a string column)
// and decodes it back again.
sealed interface StorageType<T> {
  fun toDb(value: T): Any
  fun fromDb(value: Any): T

  data object IntStorage : StorageType<Int> {
    override fun toDb(value: Int) = value
    // Widen through Number so a Long-boxed DB value cannot throw.
    override fun fromDb(value: Any) = (value as Number).toInt()
  }

  data object BoolStorage : StorageType<Boolean> {
    // Booleans are persisted as 0/1 in the integer column.
    override fun toDb(value: Boolean) = if (value) 1 else 0
    override fun fromDb(value: Any) = (value as Number).toInt() == 1
  }

  data object StringStorage : StorageType<String> {
    override fun toDb(value: String) = value
    override fun fromDb(value: Any) = value as String
  }

  data object DateStorage : StorageType<Date> {
    // Persisted as unix SECONDS (matches Converters.dateToTimestamp).
    override fun toDb(value: Date) = value.time / 1000
    // The raw value may arrive boxed as Int (the store column type) or Long;
    // a direct `value as Long` on an Int throws ClassCastException, so widen
    // through Number before converting.
    override fun fromDb(value: Any) = Date((value as Number).toLong() * 1000)
  }

  data object UrlStorage : StorageType<URL> {
    override fun toDb(value: URL) = value.toString()
    override fun fromDb(value: Any) = URL(value as String)
  }

  // JSON round-trip via Gson for arbitrary serializable types.
  class JsonStorage<T>(
    private val clazz: Class<T>,
    private val gson: Gson = Gson()
  ) : StorageType<T> {
    override fun toDb(value: T) = gson.toJson(value)
    override fun fromDb(value: Any) = gson.fromJson(value as String, clazz)
  }
}
// Typed key wrapper
// Zero-cost (@JvmInline) wrapper that pairs a StoreKey with the compile-time
// type of its value, enabling the type-safe StoreDao.get/set extensions.
@JvmInline
value class TypedStoreKey<T>(val key: StoreKey) {
companion object {
// Factory methods for type-safe key creation
inline fun <reified T> of(key: StoreKey): TypedStoreKey<T> = TypedStoreKey(key)
}
}
// Registry mapping keys to their storage types
// NOTE(review): only three int keys and three string keys are registered here,
// far fewer than StoreKey declares — confirm whether this registry is
// intentionally partial or needs to be kept in sync with StoreKey.
object StoreRegistry {
private val intKeys = setOf(
StoreKey.VERSION,
StoreKey.DEVICE_ID_HASH,
StoreKey.BACKUP_TRIGGER_DELAY
)
private val stringKeys = setOf(
StoreKey.CURRENT_USER,
StoreKey.DEVICE_ID,
StoreKey.ACCESS_TOKEN
)
// Whether the key's value is persisted in the integer column.
fun usesIntStorage(key: StoreKey): Boolean = key in intKeys
// Whether the key's value is persisted in the string column.
fun usesStringStorage(key: StoreKey): Boolean = key in stringKeys
}
// Storage type registry for automatic selection
// Resolves the StorageType implementation for a reified type parameter at the
// call site; any type not listed falls back to Gson JSON serialization.
// Note: the else branch constructs a fresh JsonStorage (and Gson) per call.
@Suppress("UNCHECKED_CAST")
object StorageTypes {
inline fun <reified T> get(): StorageType<T> = when (T::class) {
Int::class -> StorageType.IntStorage as StorageType<T>
Boolean::class -> StorageType.BoolStorage as StorageType<T>
String::class -> StorageType.StringStorage as StorageType<T>
Date::class -> StorageType.DateStorage as StorageType<T>
URL::class -> StorageType.UrlStorage as StorageType<T>
else -> StorageType.JsonStorage(T::class.java)
}
}
// Simplified extension functions with automatic storage
// Convenience overloads that pick the StorageType automatically from the
// reified value type, so call sites only pass the typed key.
suspend inline fun <reified T> StoreDao.get(typedKey: TypedStoreKey<T>): T? {
return get(typedKey, StorageTypes.get<T>())
}
suspend inline fun <reified T> StoreDao.set(typedKey: TypedStoreKey<T>, value: T) {
set(typedKey, value, StorageTypes.get<T>())
}

View File

@@ -1,405 +0,0 @@
package app.alextran.immich.schema
import androidx.room.*
import java.net.URL
import java.util.Date
@Entity(tableName = "asset_face_entity")
data class AssetFace(
@PrimaryKey
val id: String,
@ColumnInfo(name = "asset_id")
val assetId: String,
@ColumnInfo(name = "person_id")
val personId: String?,
@ColumnInfo(name = "image_width")
val imageWidth: Int,
@ColumnInfo(name = "image_height")
val imageHeight: Int,
@ColumnInfo(name = "bounding_box_x1")
val boundingBoxX1: Int,
@ColumnInfo(name = "bounding_box_y1")
val boundingBoxY1: Int,
@ColumnInfo(name = "bounding_box_x2")
val boundingBoxX2: Int,
@ColumnInfo(name = "bounding_box_y2")
val boundingBoxY2: Int,
@ColumnInfo(name = "source_type")
val sourceType: SourceType
)
@Entity(tableName = "auth_user_entity")
data class AuthUser(
@PrimaryKey
val id: String,
val name: String,
val email: String,
@ColumnInfo(name = "is_admin")
val isAdmin: Boolean,
@ColumnInfo(name = "has_profile_image")
val hasProfileImage: Boolean,
@ColumnInfo(name = "profile_changed_at")
val profileChangedAt: Date,
@ColumnInfo(name = "avatar_color")
val avatarColor: AvatarColor,
@ColumnInfo(name = "quota_size_in_bytes")
val quotaSizeInBytes: Int,
@ColumnInfo(name = "quota_usage_in_bytes")
val quotaUsageInBytes: Int,
@ColumnInfo(name = "pin_code")
val pinCode: String?
)
@Entity(tableName = "local_album_entity")
data class LocalAlbum(
@PrimaryKey
val id: String,
@ColumnInfo(name = "backup_selection")
val backupSelection: BackupSelection,
@ColumnInfo(name = "linked_remote_album_id")
val linkedRemoteAlbumId: String?,
@ColumnInfo(name = "marker")
val marker: Boolean?,
val name: String,
@ColumnInfo(name = "is_ios_shared_album")
val isIosSharedAlbum: Boolean,
@ColumnInfo(name = "updated_at")
val updatedAt: Date
)
@Entity(
tableName = "local_album_asset_entity",
primaryKeys = ["asset_id", "album_id"]
)
data class LocalAlbumAsset(
@ColumnInfo(name = "asset_id")
val assetId: String,
@ColumnInfo(name = "album_id")
val albumId: String,
@ColumnInfo(name = "marker")
val marker: String?
)
@Entity(tableName = "local_asset_entity")
data class LocalAsset(
@PrimaryKey
val id: String,
val checksum: String?,
@ColumnInfo(name = "created_at")
val createdAt: Date,
@ColumnInfo(name = "duration_in_seconds")
val durationInSeconds: Int?,
val height: Int?,
@ColumnInfo(name = "is_favorite")
val isFavorite: Boolean,
val name: String,
val orientation: String,
val type: AssetType,
@ColumnInfo(name = "updated_at")
val updatedAt: Date,
val width: Int?
)
data class BackupCandidate(
val id: String,
val type: AssetType
)
@Entity(
tableName = "memory_asset_entity",
primaryKeys = ["asset_id", "album_id"]
)
data class MemoryAsset(
@ColumnInfo(name = "asset_id")
val assetId: String,
@ColumnInfo(name = "album_id")
val albumId: String
)
@Entity(tableName = "memory_entity")
data class Memory(
@PrimaryKey
val id: String,
@ColumnInfo(name = "created_at")
val createdAt: Date,
@ColumnInfo(name = "updated_at")
val updatedAt: Date,
@ColumnInfo(name = "deleted_at")
val deletedAt: Date?,
@ColumnInfo(name = "owner_id")
val ownerId: String,
val type: MemoryType,
val data: String,
@ColumnInfo(name = "is_saved")
val isSaved: Boolean,
@ColumnInfo(name = "memory_at")
val memoryAt: Date,
@ColumnInfo(name = "seen_at")
val seenAt: Date?,
@ColumnInfo(name = "show_at")
val showAt: Date?,
@ColumnInfo(name = "hide_at")
val hideAt: Date?
)
@Entity(
tableName = "partner_entity",
primaryKeys = ["shared_by_id", "shared_with_id"]
)
data class Partner(
@ColumnInfo(name = "shared_by_id")
val sharedById: String,
@ColumnInfo(name = "shared_with_id")
val sharedWithId: String,
@ColumnInfo(name = "in_timeline")
val inTimeline: Boolean
)
@Entity(tableName = "person_entity")
data class Person(
@PrimaryKey
val id: String,
@ColumnInfo(name = "created_at")
val createdAt: Date,
@ColumnInfo(name = "updated_at")
val updatedAt: Date,
@ColumnInfo(name = "owner_id")
val ownerId: String,
val name: String,
@ColumnInfo(name = "face_asset_id")
val faceAssetId: String?,
@ColumnInfo(name = "is_favorite")
val isFavorite: Boolean,
@ColumnInfo(name = "is_hidden")
val isHidden: Boolean,
val color: String?,
@ColumnInfo(name = "birth_date")
val birthDate: Date?
)
/** A server-side album cached locally. */
@Entity(tableName = "remote_album_entity")
data class RemoteAlbum(
  @PrimaryKey
  val id: String,
  @ColumnInfo(name = "created_at")
  val createdAt: Date,
  val description: String?,
  @ColumnInfo(name = "is_activity_enabled")
  val isActivityEnabled: Boolean,
  val name: String,
  // Sort order of assets within the album (stored as an int enum/ordinal — confirm mapping).
  val order: Int,
  @ColumnInfo(name = "owner_id")
  val ownerId: String,
  @ColumnInfo(name = "thumbnail_asset_id")
  val thumbnailAssetId: String?,
  @ColumnInfo(name = "updated_at")
  val updatedAt: Date
)
/** Join row linking a remote asset to a remote album; composite primary key. */
@Entity(
  tableName = "remote_album_asset_entity",
  primaryKeys = ["asset_id", "album_id"]
)
data class RemoteAlbumAsset(
  @ColumnInfo(name = "asset_id")
  val assetId: String,
  @ColumnInfo(name = "album_id")
  val albumId: String
)
/** Membership row: a user's role within a shared remote album; composite primary key. */
@Entity(
  tableName = "remote_album_user_entity",
  primaryKeys = ["album_id", "user_id"]
)
data class RemoteAlbumUser(
  @ColumnInfo(name = "album_id")
  val albumId: String,
  @ColumnInfo(name = "user_id")
  val userId: String,
  val role: AlbumUserRole
)
/** A server-side asset (photo/video) cached locally. */
@Entity(tableName = "remote_asset_entity")
data class RemoteAsset(
  @PrimaryKey
  val id: String,
  // Content hash used to match remote assets against local files — exact algorithm/encoding
  // is defined by the sync layer, not visible here.
  val checksum: String,
  @ColumnInfo(name = "is_favorite")
  val isFavorite: Boolean,
  @ColumnInfo(name = "deleted_at")
  val deletedAt: Date?,
  @ColumnInfo(name = "owner_id")
  val ownerId: String,
  @ColumnInfo(name = "local_date_time")
  val localDateTime: Date?,
  @ColumnInfo(name = "thumb_hash")
  val thumbHash: String?,
  @ColumnInfo(name = "library_id")
  val libraryId: String?,
  // For motion photos: id of the paired video asset.
  @ColumnInfo(name = "live_photo_video_id")
  val livePhotoVideoId: String?,
  @ColumnInfo(name = "stack_id")
  val stackId: String?,
  val visibility: AssetVisibility
)
/** EXIF metadata for a remote asset; 1:1 with remote_asset_entity via asset_id. */
@Entity(tableName = "remote_exif_entity")
data class RemoteExif(
  @PrimaryKey
  @ColumnInfo(name = "asset_id")
  val assetId: String,
  val city: String?,
  val state: String?,
  val country: String?,
  @ColumnInfo(name = "date_time_original")
  val dateTimeOriginal: Date?,
  val description: String?,
  val height: Int?,
  val width: Int?,
  val exposureTime: String?,
  @ColumnInfo(name = "f_number")
  val fNumber: Double?,
  // NOTE(review): Int may overflow for files > 2 GiB — confirm whether Long is needed.
  @ColumnInfo(name = "file_size")
  val fileSize: Int?,
  @ColumnInfo(name = "focal_length")
  val focalLength: Double?,
  val latitude: Double?,
  val longitude: Double?,
  val iso: Int?,
  val make: String?,
  val model: String?,
  val lens: String?,
  val orientation: String?,
  @ColumnInfo(name = "time_zone")
  val timeZone: String?,
  val rating: Int?,
  @ColumnInfo(name = "projection_type")
  val projectionType: String?
)
/** A stack grouping related assets; primary_asset_id is the representative asset. */
@Entity(tableName = "stack_entity")
data class Stack(
  @PrimaryKey
  val id: String,
  @ColumnInfo(name = "created_at")
  val createdAt: Date,
  @ColumnInfo(name = "updated_at")
  val updatedAt: Date,
  @ColumnInfo(name = "owner_id")
  val ownerId: String,
  @ColumnInfo(name = "primary_asset_id")
  val primaryAssetId: String
)
/**
 * Key/value settings store keyed by [StoreKey].
 * Exactly one of [stringValue]/[intValue] is expected to be populated per row,
 * depending on the key's storage type — confirm against the store DAO.
 */
@Entity(tableName = "store_entity")
data class Store(
  @PrimaryKey
  val id: StoreKey,
  @ColumnInfo(name = "string_value")
  val stringValue: String?,
  @ColumnInfo(name = "int_value")
  val intValue: Int?
)
/** A queued upload (or live-photo download) job for a local asset. */
@Entity(tableName = "upload_task_entity")
data class UploadTask(
  @PrimaryKey(autoGenerate = true)
  val id: Long = 0,
  // Number of attempts made so far; compared against TaskConfig.MAX_ATTEMPTS.
  val attempts: Int,
  @ColumnInfo(name = "created_at")
  val createdAt: Date,
  // Staged file location, if one was materialized — persisted via a URL type converter.
  @ColumnInfo(name = "file_path")
  val filePath: URL?,
  @ColumnInfo(name = "is_live_photo")
  val isLivePhoto: Boolean?,
  @ColumnInfo(name = "last_error")
  val lastError: UploadErrorCode?,
  @ColumnInfo(name = "live_photo_video_id")
  val livePhotoVideoId: String?,
  @ColumnInfo(name = "local_id")
  val localId: String,
  val method: UploadMethod,
  // Higher values are picked first when draining the queue (see getTasksForUpload usage).
  val priority: Float,
  // Earliest time the task may be retried after a failure; null = retry immediately / terminal.
  @ColumnInfo(name = "retry_after")
  val retryAfter: Date?,
  val status: TaskStatus
)
// Data class for query results
/**
 * Flattened row joining an upload task with its local asset's metadata,
 * as returned by the upload-task DAO queries (not a table itself).
 */
data class LocalAssetTaskData(
  val attempts: Int,
  val checksum: String,
  val createdAt: Date,
  val fileName: String,
  val filePath: URL?,
  val isFavorite: Boolean,
  val localId: String,
  val priority: Float,
  val taskId: Long,
  val type: AssetType,
  val updatedAt: Date
)
/**
 * Aggregate counters over upload_task_entity, grouped by task status.
 * NOTE(review): this is declared with @Entity but reads like an aggregate/stat
 * projection — confirm whether it should be a @DatabaseView (or a plain query
 * POJO) rather than a real table.
 */
@Entity(tableName = "upload_task_stats")
data class UploadTaskStat(
  @ColumnInfo(name = "pending_downloads")
  val pendingDownloads: Int,
  @ColumnInfo(name = "pending_uploads")
  val pendingUploads: Int,
  @ColumnInfo(name = "queued_downloads")
  val queuedDownloads: Int,
  @ColumnInfo(name = "queued_uploads")
  val queuedUploads: Int,
  @ColumnInfo(name = "failed_downloads")
  val failedDownloads: Int,
  @ColumnInfo(name = "failed_uploads")
  val failedUploads: Int,
  @ColumnInfo(name = "completed_uploads")
  val completedUploads: Int
)
/** A server user cached locally (the signed-in user and any partners). */
@Entity(tableName = "user_entity")
data class User(
  @PrimaryKey
  val id: String,
  val name: String,
  val email: String,
  @ColumnInfo(name = "has_profile_image")
  val hasProfileImage: Boolean,
  // Timestamp of the last avatar change; used to bust cached profile images.
  @ColumnInfo(name = "profile_changed_at")
  val profileChangedAt: Date,
  @ColumnInfo(name = "avatar_color")
  val avatarColor: AvatarColor
)
/**
 * Arbitrary per-user metadata blob; composite primary key (user_id, key).
 *
 * Equality/hash are overridden manually because [value] is a ByteArray, whose
 * default data-class equals compares references, not contents.
 *
 * NOTE(review): [key] is typed as Date, which is surprising for a column named
 * "key" in a metadata table — confirm whether this should be a String or an
 * enum of metadata keys.
 */
@Entity(
  tableName = "user_metadata_entity",
  primaryKeys = ["user_id", "key"]
)
data class UserMetadata(
  @ColumnInfo(name = "user_id")
  val userId: String,
  val key: Date,
  val value: ByteArray
) {
  override fun equals(other: Any?): Boolean {
    if (this === other) return true
    if (javaClass != other?.javaClass) return false
    other as UserMetadata
    if (userId != other.userId) return false
    if (key != other.key) return false
    // contentEquals: compare byte-array contents, not references.
    if (!value.contentEquals(other.value)) return false
    return true
  }
  override fun hashCode(): Int {
    var result = userId.hashCode()
    result = 31 * result + key.hashCode()
    result = 31 * result + value.contentHashCode()
    return result
  }
}

View File

@@ -305,7 +305,6 @@ interface NativeSyncApi {
fun getAssetsForAlbum(albumId: String, updatedTimeCond: Long?): List<PlatformAsset>
fun hashAssets(assetIds: List<String>, allowNetworkAccess: Boolean, callback: (Result<List<HashResult>>) -> Unit)
fun cancelHashing()
fun getTrashedAssets(): Map<String, List<PlatformAsset>>
companion object {
/** The codec used by NativeSyncApi. */
@@ -484,21 +483,6 @@ interface NativeSyncApi {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets$separatedMessageChannelSuffix", codec, taskQueue)
if (api != null) {
channel.setMessageHandler { _, reply ->
val wrapped: List<Any?> = try {
listOf(api.getTrashedAssets())
} catch (exception: Throwable) {
MessagesPigeonUtils.wrapError(exception)
}
reply.reply(wrapped)
}
} else {
channel.setMessageHandler(null)
}
}
}
}
}

View File

@@ -21,9 +21,4 @@ class NativeSyncApiImpl26(context: Context) : NativeSyncApiImplBase(context), Na
override fun getMediaChanges(): SyncDelta {
throw IllegalStateException("Method not supported on this Android version.")
}
override fun getTrashedAssets(): Map<String, List<PlatformAsset>> {
//Method not supported on this Android version.
return emptyMap()
}
}

View File

@@ -1,9 +1,7 @@
package app.alextran.immich.sync
import android.content.ContentResolver
import android.content.Context
import android.os.Build
import android.os.Bundle
import android.provider.MediaStore
import androidx.annotation.RequiresApi
import androidx.annotation.RequiresExtension
@@ -88,29 +86,4 @@ class NativeSyncApiImpl30(context: Context) : NativeSyncApiImplBase(context), Na
// Unmounted volumes are handled in dart when the album is removed
return SyncDelta(hasChanges, changed, deleted, assetAlbums)
}
override fun getTrashedAssets(): Map<String, List<PlatformAsset>> {
val result = LinkedHashMap<String, MutableList<PlatformAsset>>()
val volumes = MediaStore.getExternalVolumeNames(ctx)
for (volume in volumes) {
val queryArgs = Bundle().apply {
putString(ContentResolver.QUERY_ARG_SQL_SELECTION, MEDIA_SELECTION)
putStringArray(ContentResolver.QUERY_ARG_SQL_SELECTION_ARGS, MEDIA_SELECTION_ARGS)
putInt(MediaStore.QUERY_ARG_MATCH_TRASHED, MediaStore.MATCH_ONLY)
}
getCursor(volume, queryArgs).use { cursor ->
getAssets(cursor).forEach { res ->
if (res is AssetResult.ValidAsset) {
result.getOrPut(res.albumId) { mutableListOf() }.add(res.asset)
}
}
}
}
return result.mapValues { it.value.toList() }
}
}

View File

@@ -4,8 +4,6 @@ import android.annotation.SuppressLint
import android.content.ContentUris
import android.content.Context
import android.database.Cursor
import android.net.Uri
import android.os.Bundle
import android.provider.MediaStore
import android.util.Base64
import androidx.core.database.getStringOrNull
@@ -83,16 +81,6 @@ open class NativeSyncApiImplBase(context: Context) : ImmichPlugin() {
sortOrder,
)
protected fun getCursor(
volume: String,
queryArgs: Bundle
): Cursor? = ctx.contentResolver.query(
MediaStore.Files.getContentUri(volume),
ASSET_PROJECTION,
queryArgs,
null
)
protected fun getAssets(cursor: Cursor?): Sequence<AssetResult> {
return sequence {
cursor?.use { c ->

View File

@@ -1,54 +0,0 @@
package app.alextran.immich.upload
import android.content.Context
import android.net.ConnectivityManager
import android.net.Network
import android.net.NetworkCapabilities
import android.net.NetworkRequest
/**
 * Process-wide connectivity tracker. [initialize] must be called once (with any
 * Context) before the query methods return meaningful values.
 */
object NetworkMonitor {
  @Volatile
  private var isConnected = false
  @Volatile
  private var isWifi = false
  fun initialize(context: Context) {
    val connectivityManager = context.getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
    val networkRequest = NetworkRequest.Builder()
      .addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
      .build()
    connectivityManager.registerNetworkCallback(networkRequest, object : ConnectivityManager.NetworkCallback() {
      override fun onAvailable(network: Network) {
        isConnected = true
        checkWifi(connectivityManager, network)
      }
      // NOTE(review): onLost fires per-network; losing one network while another
      // is still up will incorrectly flip isConnected to false until the next
      // onAvailable — confirm whether a Set<Network> should be tracked instead.
      override fun onLost(network: Network) {
        isConnected = false
        isWifi = false
      }
      override fun onCapabilitiesChanged(network: Network, capabilities: NetworkCapabilities) {
        checkWifi(connectivityManager, network)
      }
      private fun checkWifi(cm: ConnectivityManager, network: Network) {
        val capabilities = cm.getNetworkCapabilities(network)
        isWifi = capabilities?.hasTransport(NetworkCapabilities.TRANSPORT_WIFI) == true
      }
    })
  }
  fun isConnected(): Boolean = isConnected
  // NOTE(review): this re-queries the active network rather than reading the
  // cached isWifi flag maintained above — the flag is currently write-only.
  // Confirm which source of truth is intended.
  fun isWifiConnected(context: Context): Boolean {
    if (!isConnected) return false
    val connectivityManager = context.getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
    val capabilities = connectivityManager.getNetworkCapabilities(connectivityManager.activeNetwork)
    return capabilities?.hasTransport(NetworkCapabilities.TRANSPORT_WIFI) == true
  }
}

View File

@@ -1,8 +0,0 @@
package app.alextran.immich.upload
/** Tunables for the upload/download task queue. */
object TaskConfig {
  // Attempts after which a task is marked terminally failed (see UploadWorker.handleFailure).
  const val MAX_ATTEMPTS = 3
  // Caps on how many tasks may sit in the pending state at once.
  const val MAX_PENDING_DOWNLOADS = 10
  const val MAX_PENDING_UPLOADS = 10
  // Number of uploads processed concurrently per worker run.
  const val MAX_ACTIVE_UPLOADS = 3
}

View File

@@ -1,429 +0,0 @@
// Autogenerated from Pigeon (v26.0.2), do not edit directly.
// See also: https://pub.dev/packages/pigeon
@file:Suppress("UNCHECKED_CAST", "ArrayInDataClass")
package app.alextran.immich.upload
import android.util.Log
import io.flutter.plugin.common.BasicMessageChannel
import io.flutter.plugin.common.BinaryMessenger
import io.flutter.plugin.common.EventChannel
import io.flutter.plugin.common.MessageCodec
import io.flutter.plugin.common.StandardMethodCodec
import io.flutter.plugin.common.StandardMessageCodec
import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
// Pigeon-generated helpers (file header says "do not edit directly"); changes
// here would be clobbered by the next codegen run.
private object UploadTaskPigeonUtils {
  // Wraps a successful result in the single-element list the Pigeon wire format expects.
  fun wrapResult(result: Any?): List<Any?> {
    return listOf(result)
  }
  // Encodes an exception as [code, message, details] for the Flutter side.
  fun wrapError(exception: Throwable): List<Any?> {
    return if (exception is FlutterError) {
      listOf(
        exception.code,
        exception.message,
        exception.details
      )
    } else {
      listOf(
        exception.javaClass.simpleName,
        exception.toString(),
        "Cause: " + exception.cause + ", Stacktrace: " + Log.getStackTraceString(exception)
      )
    }
  }
  // Structural equality that recurses into arrays, lists, and maps (primitive
  // arrays use contentEquals since their == is reference equality).
  fun deepEquals(a: Any?, b: Any?): Boolean {
    if (a is ByteArray && b is ByteArray) {
      return a.contentEquals(b)
    }
    if (a is IntArray && b is IntArray) {
      return a.contentEquals(b)
    }
    if (a is LongArray && b is LongArray) {
      return a.contentEquals(b)
    }
    if (a is DoubleArray && b is DoubleArray) {
      return a.contentEquals(b)
    }
    if (a is Array<*> && b is Array<*>) {
      return a.size == b.size &&
          a.indices.all{ deepEquals(a[it], b[it]) }
    }
    if (a is List<*> && b is List<*>) {
      return a.size == b.size &&
          a.indices.all{ deepEquals(a[it], b[it]) }
    }
    if (a is Map<*, *> && b is Map<*, *>) {
      return a.size == b.size && a.all {
        (b as Map<Any?, Any?>).containsKey(it.key) &&
            deepEquals(it.value, b[it.key])
      }
    }
    return a == b
  }
}
/**
 * Error class for passing custom error details to Flutter via a thrown PlatformException.
 * @property code The error code.
 * @property message The error message.
 * @property details The error details. Must be a datatype supported by the api codec.
 */
// Pigeon-generated; recognized specially by wrapError above so its fields
// travel intact over the channel instead of a generic stringified exception.
class FlutterError (
  val code: String,
  override val message: String? = null,
  val details: Any? = null
) : Throwable()
// Pigeon-generated error codes; raw values must match the Dart-side enum ordinals.
enum class UploadApiErrorCode(val raw: Int) {
  UNKNOWN(0),
  ASSET_NOT_FOUND(1),
  FILE_NOT_FOUND(2),
  RESOURCE_NOT_FOUND(3),
  INVALID_RESOURCE(4),
  ENCODING_FAILED(5),
  WRITE_FAILED(6),
  NOT_ENOUGH_SPACE(7),
  NETWORK_ERROR(8),
  PHOTOS_INTERNAL_ERROR(9),
  PHOTOS_UNKNOWN_ERROR(10),
  NO_SERVER_URL(11),
  NO_DEVICE_ID(12),
  NO_ACCESS_TOKEN(13),
  INTERRUPTED(14),
  CANCELLED(15),
  DOWNLOAD_STALLED(16),
  FORCE_QUIT(17),
  OUT_OF_RESOURCES(18),
  BACKGROUND_UPDATES_DISABLED(19),
  UPLOAD_TIMEOUT(20),
  I_CLOUD_RATE_LIMIT(21),
  I_CLOUD_THROTTLED(22);
  companion object {
    // Returns null for unrecognized raw values (e.g. from a newer Dart side).
    fun ofRaw(raw: Int): UploadApiErrorCode? {
      return values().firstOrNull { it.raw == raw }
    }
  }
}
// Pigeon-generated task lifecycle states; raw values must match the Dart-side enum.
enum class UploadApiStatus(val raw: Int) {
  DOWNLOAD_PENDING(0),
  DOWNLOAD_QUEUED(1),
  DOWNLOAD_FAILED(2),
  UPLOAD_PENDING(3),
  UPLOAD_QUEUED(4),
  UPLOAD_FAILED(5),
  UPLOAD_COMPLETE(6),
  UPLOAD_SKIPPED(7);
  companion object {
    // Returns null for unrecognized raw values.
    fun ofRaw(raw: Int): UploadApiStatus? {
      return values().firstOrNull { it.raw == raw }
    }
  }
}
/** Generated class from Pigeon that represents data sent in messages. */
// Status event for one upload task, streamed to Flutter via StreamStatusStreamHandler.
data class UploadApiTaskStatus (
  val id: String,
  val filename: String,
  val status: UploadApiStatus,
  val errorCode: UploadApiErrorCode? = null,
  val httpStatusCode: Long? = null
)
{
  companion object {
    // Decodes from the positional-list wire format; order must match toList().
    fun fromList(pigeonVar_list: List<Any?>): UploadApiTaskStatus {
      val id = pigeonVar_list[0] as String
      val filename = pigeonVar_list[1] as String
      val status = pigeonVar_list[2] as UploadApiStatus
      val errorCode = pigeonVar_list[3] as UploadApiErrorCode?
      val httpStatusCode = pigeonVar_list[4] as Long?
      return UploadApiTaskStatus(id, filename, status, errorCode, httpStatusCode)
    }
  }
  fun toList(): List<Any?> {
    return listOf(
      id,
      filename,
      status,
      errorCode,
      httpStatusCode,
    )
  }
  override fun equals(other: Any?): Boolean {
    if (other !is UploadApiTaskStatus) {
      return false
    }
    if (this === other) {
      return true
    }
    return UploadTaskPigeonUtils.deepEquals(toList(), other.toList())  }
  override fun hashCode(): Int = toList().hashCode()
}
/** Generated class from Pigeon that represents data sent in messages. */
// Progress event for one upload task, streamed to Flutter via StreamProgressStreamHandler.
data class UploadApiTaskProgress (
  val id: String,
  val progress: Double,
  val speed: Double? = null,
  val totalBytes: Long? = null
)
{
  companion object {
    // Decodes from the positional-list wire format; order must match toList().
    fun fromList(pigeonVar_list: List<Any?>): UploadApiTaskProgress {
      val id = pigeonVar_list[0] as String
      val progress = pigeonVar_list[1] as Double
      val speed = pigeonVar_list[2] as Double?
      val totalBytes = pigeonVar_list[3] as Long?
      return UploadApiTaskProgress(id, progress, speed, totalBytes)
    }
  }
  fun toList(): List<Any?> {
    return listOf(
      id,
      progress,
      speed,
      totalBytes,
    )
  }
  override fun equals(other: Any?): Boolean {
    if (other !is UploadApiTaskProgress) {
      return false
    }
    if (this === other) {
      return true
    }
    return UploadTaskPigeonUtils.deepEquals(toList(), other.toList())  }
  override fun hashCode(): Int = toList().hashCode()
}
// Pigeon-generated codec: extends the standard codec with custom type tags
// 129-132 for the enums and data classes above. Tag numbers are part of the
// wire protocol shared with the Dart side.
private open class UploadTaskPigeonCodec : StandardMessageCodec() {
  override fun readValueOfType(type: Byte, buffer: ByteBuffer): Any? {
    return when (type) {
      129.toByte() -> {
        return (readValue(buffer) as Long?)?.let {
          UploadApiErrorCode.ofRaw(it.toInt())
        }
      }
      130.toByte() -> {
        return (readValue(buffer) as Long?)?.let {
          UploadApiStatus.ofRaw(it.toInt())
        }
      }
      131.toByte() -> {
        return (readValue(buffer) as? List<Any?>)?.let {
          UploadApiTaskStatus.fromList(it)
        }
      }
      132.toByte() -> {
        return (readValue(buffer) as? List<Any?>)?.let {
          UploadApiTaskProgress.fromList(it)
        }
      }
      else -> super.readValueOfType(type, buffer)
    }
  }
  override fun writeValue(stream: ByteArrayOutputStream, value: Any?)   {
    when (value) {
      is UploadApiErrorCode -> {
        stream.write(129)
        writeValue(stream, value.raw)
      }
      is UploadApiStatus -> {
        stream.write(130)
        writeValue(stream, value.raw)
      }
      is UploadApiTaskStatus -> {
        stream.write(131)
        writeValue(stream, value.toList())
      }
      is UploadApiTaskProgress -> {
        stream.write(132)
        writeValue(stream, value.toList())
      }
      else -> super.writeValue(stream, value)
    }
  }
}
// Method codec wrapping the message codec, used by the event channels below.
val UploadTaskPigeonMethodCodec = StandardMethodCodec(UploadTaskPigeonCodec())
/** Generated interface from Pigeon that represents a handler of messages from Flutter. */
// Host-side API implemented by UploadTaskImpl; setUp wires one BasicMessageChannel
// per method, with the channel name encoding the method and optional suffix.
interface UploadApi {
  fun initialize(callback: (Result<Unit>) -> Unit)
  fun refresh(callback: (Result<Unit>) -> Unit)
  fun cancelAll(callback: (Result<Unit>) -> Unit)
  fun enqueueAssets(localIds: List<String>, callback: (Result<Unit>) -> Unit)
  fun enqueueFiles(paths: List<String>, callback: (Result<Unit>) -> Unit)
  companion object {
    /** The codec used by UploadApi. */
    val codec: MessageCodec<Any?> by lazy {
      UploadTaskPigeonCodec()
    }
    /** Sets up an instance of `UploadApi` to handle messages through the `binaryMessenger`. */
    // Passing api = null tears the handlers down.
    @JvmOverloads
    fun setUp(binaryMessenger: BinaryMessenger, api: UploadApi?, messageChannelSuffix: String = "") {
      val separatedMessageChannelSuffix = if (messageChannelSuffix.isNotEmpty()) ".$messageChannelSuffix" else ""
      run {
        val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.initialize$separatedMessageChannelSuffix", codec)
        if (api != null) {
          channel.setMessageHandler { _, reply ->
            api.initialize{ result: Result<Unit> ->
              val error = result.exceptionOrNull()
              if (error != null) {
                reply.reply(UploadTaskPigeonUtils.wrapError(error))
              } else {
                reply.reply(UploadTaskPigeonUtils.wrapResult(null))
              }
            }
          }
        } else {
          channel.setMessageHandler(null)
        }
      }
      run {
        val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.refresh$separatedMessageChannelSuffix", codec)
        if (api != null) {
          channel.setMessageHandler { _, reply ->
            api.refresh{ result: Result<Unit> ->
              val error = result.exceptionOrNull()
              if (error != null) {
                reply.reply(UploadTaskPigeonUtils.wrapError(error))
              } else {
                reply.reply(UploadTaskPigeonUtils.wrapResult(null))
              }
            }
          }
        } else {
          channel.setMessageHandler(null)
        }
      }
      run {
        val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.cancelAll$separatedMessageChannelSuffix", codec)
        if (api != null) {
          channel.setMessageHandler { _, reply ->
            api.cancelAll{ result: Result<Unit> ->
              val error = result.exceptionOrNull()
              if (error != null) {
                reply.reply(UploadTaskPigeonUtils.wrapError(error))
              } else {
                reply.reply(UploadTaskPigeonUtils.wrapResult(null))
              }
            }
          }
        } else {
          channel.setMessageHandler(null)
        }
      }
      run {
        val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.enqueueAssets$separatedMessageChannelSuffix", codec)
        if (api != null) {
          channel.setMessageHandler { message, reply ->
            val args = message as List<Any?>
            val localIdsArg = args[0] as List<String>
            api.enqueueAssets(localIdsArg) { result: Result<Unit> ->
              val error = result.exceptionOrNull()
              if (error != null) {
                reply.reply(UploadTaskPigeonUtils.wrapError(error))
              } else {
                reply.reply(UploadTaskPigeonUtils.wrapResult(null))
              }
            }
          }
        } else {
          channel.setMessageHandler(null)
        }
      }
      run {
        val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.enqueueFiles$separatedMessageChannelSuffix", codec)
        if (api != null) {
          channel.setMessageHandler { message, reply ->
            val args = message as List<Any?>
            val pathsArg = args[0] as List<String>
            api.enqueueFiles(pathsArg) { result: Result<Unit> ->
              val error = result.exceptionOrNull()
              if (error != null) {
                reply.reply(UploadTaskPigeonUtils.wrapError(error))
              } else {
                reply.reply(UploadTaskPigeonUtils.wrapResult(null))
              }
            }
          }
        } else {
          channel.setMessageHandler(null)
        }
      }
    }
  }
}
// Adapts a typed wrapper to Flutter's EventChannel.StreamHandler, managing the
// sink lifecycle between onListen and onCancel.
private class UploadTaskPigeonStreamHandler<T>(
  val wrapper: UploadTaskPigeonEventChannelWrapper<T>
) : EventChannel.StreamHandler {
  var pigeonSink: PigeonEventSink<T>? = null
  override fun onListen(p0: Any?, sink: EventChannel.EventSink) {
    pigeonSink = PigeonEventSink<T>(sink)
    wrapper.onListen(p0, pigeonSink!!)
  }
  override fun onCancel(p0: Any?) {
    pigeonSink = null
    wrapper.onCancel(p0)
  }
}
// Typed event-channel callbacks with no-op defaults; implement to produce events.
interface UploadTaskPigeonEventChannelWrapper<T> {
  open fun onListen(p0: Any?, sink: PigeonEventSink<T>) {}
  open fun onCancel(p0: Any?) {}
}
// Type-safe facade over the raw (untyped) EventChannel.EventSink.
class PigeonEventSink<T>(private val sink: EventChannel.EventSink) {
  fun success(value: T) {
    sink.success(value)
  }
  fun error(errorCode: String, errorMessage: String?, errorDetails: Any?) {
    sink.error(errorCode, errorMessage, errorDetails)
  }
  // Signals that no further events will be emitted on this stream.
  fun endOfStream() {
    sink.endOfStream()
  }
}
// Registers a status-event stream on the fixed channel name; instanceName
// suffixes the channel for multi-instance setups.
abstract class StreamStatusStreamHandler : UploadTaskPigeonEventChannelWrapper<UploadApiTaskStatus> {
  companion object {
    fun register(messenger: BinaryMessenger, streamHandler: StreamStatusStreamHandler, instanceName: String = "") {
      var channelName: String = "dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamStatus"
      if (instanceName.isNotEmpty()) {
        channelName += ".$instanceName"
      }
      val internalStreamHandler = UploadTaskPigeonStreamHandler<UploadApiTaskStatus>(streamHandler)
      EventChannel(messenger, channelName, UploadTaskPigeonMethodCodec).setStreamHandler(internalStreamHandler)
    }
  }
}
// Registers a progress-event stream on the fixed channel name; instanceName
// suffixes the channel for multi-instance setups.
abstract class StreamProgressStreamHandler : UploadTaskPigeonEventChannelWrapper<UploadApiTaskProgress> {
  companion object {
    fun register(messenger: BinaryMessenger, streamHandler: StreamProgressStreamHandler, instanceName: String = "") {
      var channelName: String = "dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamProgress"
      if (instanceName.isNotEmpty()) {
        channelName += ".$instanceName"
      }
      val internalStreamHandler = UploadTaskPigeonStreamHandler<UploadApiTaskProgress>(streamHandler)
      EventChannel(messenger, channelName, UploadTaskPigeonMethodCodec).setStreamHandler(internalStreamHandler)
    }
  }
}

View File

@@ -1,175 +0,0 @@
package app.alextran.immich.upload
import android.content.Context
import androidx.work.*
import app.alextran.immich.schema.AppDatabase
import app.alextran.immich.schema.AssetType
import app.alextran.immich.schema.StorageType
import app.alextran.immich.schema.StoreKey
import app.alextran.immich.schema.TaskStatus
import app.alextran.immich.schema.UploadMethod
import app.alextran.immich.schema.UploadTask
import kotlinx.coroutines.*
import kotlinx.coroutines.guava.await
import java.util.Date
import java.util.concurrent.TimeUnit
// TODO: this is almost entirely LLM-generated (ported from Swift), need to verify behavior
/**
 * Host-side implementation of the Pigeon [UploadApi]: seeds the upload queue
 * from backup-candidate assets and schedules [UploadWorker] via WorkManager.
 *
 * Note the file-level TODO above: this was ported from Swift and its behavior
 * has not been fully verified.
 */
class UploadTaskImpl(context: Context) : UploadApi {
  private val ctx: Context = context.applicationContext
  private val db: AppDatabase = AppDatabase.getDatabase(ctx)
  private val workManager: WorkManager = WorkManager.getInstance(ctx)
  @Volatile
  private var isInitialized = false
  // Background scope for all API entry points; callbacks are posted back on Main.
  private val scope = CoroutineScope(SupervisorJob() + Dispatchers.Default)
  // Reconciles DB task state with actual WorkManager state (tasks left "queued"
  // by a killed process are reset), clears temp files, then kicks off a backup pass.
  override fun initialize(callback: (Result<Unit>) -> Unit) {
    scope.launch {
      try {
        // Clean up orphaned tasks
        val activeWorkInfos = workManager.getWorkInfosByTag(UPLOAD_WORK_TAG).await()
        val activeTaskIds = activeWorkInfos
          .filter { it.state == WorkInfo.State.RUNNING || it.state == WorkInfo.State.ENQUEUED }
          .mapNotNull {
            // Task ids are smuggled through WorkManager as "task_<id>" tags.
            it.tags.find { tag -> tag.startsWith("task_") }?.substringAfter("task_")?.toLongOrNull()
          }
          .toSet()
        db.uploadTaskDao().run {
          withContext(Dispatchers.IO) {
            // Find tasks marked as queued but not actually running
            val dbQueuedIds = getTaskIdsByStatus(
              listOf(
                TaskStatus.DOWNLOAD_QUEUED,
                TaskStatus.UPLOAD_QUEUED,
                TaskStatus.UPLOAD_PENDING
              )
            )
            val orphanIds = dbQueuedIds.filterNot { it in activeTaskIds }
            if (orphanIds.isNotEmpty()) {
              resetOrphanedTasks(orphanIds)
            }
          }
        }
        // Clean up temp files
        val tempDir = getTempDirectory()
        tempDir.deleteRecursively()
        isInitialized = true
        startBackup()
        withContext(Dispatchers.Main) {
          callback(Result.success(Unit))
        }
      } catch (e: Exception) {
        withContext(Dispatchers.Main) {
          callback(Result.failure(e))
        }
      }
    }
  }
  // Re-runs the backup pass on demand (e.g. after new assets are detected).
  override fun refresh(callback: (Result<Unit>) -> Unit) {
    scope.launch {
      try {
        startBackup()
        withContext(Dispatchers.Main) {
          callback(Result.success(Unit))
        }
      } catch (e: Exception) {
        withContext(Dispatchers.Main) {
          callback(Result.failure(e))
        }
      }
    }
  }
  // Enqueues up to `availableSlots` new UPLOAD_PENDING tasks for candidate assets,
  // then ensures an upload worker is scheduled. No-op before initialize() succeeds
  // or while backup is disabled; errors are logged, not propagated.
  private suspend fun startBackup() {
    if (!isInitialized) return
    withContext(Dispatchers.IO) {
      try {
        // Check if backup is enabled
        val backupEnabled = db.storeDao().get(StoreKey.enableBackup, StorageType.BoolStorage)
        if (backupEnabled != true) return@withContext
        // Get upload statistics
        val stats = db.uploadTaskStatDao().getStats() ?: return@withContext
        // Slots = combined pending/queued budget minus what is already in flight.
        val availableSlots = TaskConfig.MAX_PENDING_UPLOADS + TaskConfig.MAX_PENDING_DOWNLOADS -
            (stats.pendingDownloads + stats.queuedDownloads + stats.pendingUploads + stats.queuedUploads)
        if (availableSlots <= 0) return@withContext
        // Find candidate assets for backup
        val candidates = db.localAssetDao().getCandidatesForBackup(availableSlots)
        if (candidates.isEmpty()) return@withContext
        // Create upload tasks for candidates
        db.uploadTaskDao().insertAll(candidates.map { candidate ->
          UploadTask(
            attempts = 0,
            createdAt = Date(),
            filePath = null,
            isLivePhoto = null,
            lastError = null,
            livePhotoVideoId = null,
            localId = candidate.id,
            method = UploadMethod.MULTIPART,
            // Images are prioritized over videos in the queue.
            priority = when (candidate.type) {
              AssetType.IMAGE -> 0.5f
              else -> 0.3f
            },
            retryAfter = null,
            status = TaskStatus.UPLOAD_PENDING
          )
        })
        // Start upload workers
        enqueueUploadWorkers()
      } catch (e: Exception) {
        android.util.Log.e(TAG, "Backup queue error", e)
      }
    }
  }
  // Schedules a single unique UploadWorker (KEEP policy: no-op if one is already
  // enqueued) that requires network connectivity.
  private fun enqueueUploadWorkers() {
    // Create constraints
    val constraints = Constraints.Builder()
      .setRequiredNetworkType(NetworkType.CONNECTED)
      .build()
    // Create work request
    val uploadWorkRequest = OneTimeWorkRequestBuilder<UploadWorker>()
      .setConstraints(constraints)
      .addTag(UPLOAD_WORK_TAG)
      .setBackoffCriteria(
        BackoffPolicy.EXPONENTIAL,
        WorkRequest.MIN_BACKOFF_MILLIS,
        TimeUnit.MILLISECONDS
      )
      .build()
    workManager.enqueueUniqueWork(
      UPLOAD_WORK_NAME,
      ExistingWorkPolicy.KEEP,
      uploadWorkRequest
    )
  }
  // Scratch directory for staged upload files; wiped on initialize().
  private fun getTempDirectory(): java.io.File {
    return java.io.File(ctx.cacheDir, "upload_temp").apply {
      if (!exists()) mkdirs()
    }
  }
  companion object {
    private const val TAG = "UploadTaskImpl"
    private const val UPLOAD_WORK_TAG = "immich_upload"
    private const val UPLOAD_WORK_NAME = "immich_upload_unique"
  }
}

View File

@@ -1,265 +0,0 @@
package app.alextran.immich.upload
import android.content.Context
import android.provider.MediaStore
import androidx.work.*
import app.alextran.immich.schema.AppDatabase
import app.alextran.immich.schema.AssetType
import app.alextran.immich.schema.LocalAssetTaskData
import app.alextran.immich.schema.StorageType
import app.alextran.immich.schema.StoreKey
import app.alextran.immich.schema.TaskStatus
import app.alextran.immich.schema.UploadErrorCode
import kotlinx.coroutines.*
import okhttp3.*
import okhttp3.MediaType.Companion.toMediaType
import java.io.File
import java.io.IOException
import java.net.URL
import java.util.*
import java.util.concurrent.TimeUnit
class UploadWorker(
context: Context,
params: WorkerParameters
) : CoroutineWorker(context, params) {
private val db = AppDatabase.getDatabase(applicationContext)
private val client = createOkHttpClient()
override suspend fun doWork(): Result = withContext(Dispatchers.IO) {
try {
// Check if backup is enabled
val backupEnabled = db.storeDao().get(StoreKey.enableBackup, StorageType.BoolStorage)
if (backupEnabled != true) {
return@withContext Result.success()
}
// Get pending upload tasks
val tasks = db.uploadTaskDao().getTasksForUpload(TaskConfig.MAX_ACTIVE_UPLOADS)
if (tasks.isEmpty()) {
return@withContext Result.success()
}
// Process tasks concurrently
val results = tasks.map { task ->
async { processUploadTask(task) }
}.awaitAll()
// Check if we should continue processing
val hasMore = db.uploadTaskDao().hasPendingTasks()
if (hasMore) {
// Schedule next batch
enqueueNextBatch()
}
// Determine result based on processing outcomes
when {
results.all { it } -> Result.success()
results.any { it } -> Result.success() // Partial success
else -> Result.retry()
}
} catch (e: Exception) {
android.util.Log.e(TAG, "Upload worker error", e)
Result.retry()
}
}
private suspend fun processUploadTask(task: LocalAssetTaskData): Boolean {
return try {
// Get asset from MediaStore
val assetUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI
.buildUpon()
.appendPath(task.localId)
.build()
val cursor = applicationContext.contentResolver.query(
assetUri,
arrayOf(MediaStore.Images.Media.DATA),
null,
null,
null
) ?: return handleFailure(task, UploadErrorCode.ASSET_NOT_FOUND)
val filePath = cursor.use {
if (it.moveToFirst()) {
it.getString(it.getColumnIndexOrThrow(MediaStore.Images.Media.DATA))
} else null
} ?: return handleFailure(task, UploadErrorCode.ASSET_NOT_FOUND)
val file = File(filePath)
if (!file.exists()) {
return handleFailure(task, UploadErrorCode.FILE_NOT_FOUND)
}
// Get server configuration
val serverUrl = db.storeDao().get(StoreKey.serverEndpoint, StorageType.UrlStorage)
?: return handleFailure(task, UploadErrorCode.NO_SERVER_URL)
val accessToken = db.storeDao().get(StoreKey.accessToken, StorageType.StringStorage)
?: return handleFailure(task, UploadErrorCode.NO_ACCESS_TOKEN)
val deviceId = db.storeDao().get(StoreKey.deviceId, StorageType.StringStorage)
?: return handleFailure(task, UploadErrorCode.NO_DEVICE_ID)
// Check network constraints
val useWifiOnly = when (task.type) {
AssetType.IMAGE -> db.storeDao().get(StoreKey.useWifiForUploadPhotos, StorageType.BoolStorage) ?: false
AssetType.VIDEO -> db.storeDao().get(StoreKey.useWifiForUploadVideos, StorageType.BoolStorage) ?: false
else -> false
}
if (useWifiOnly && !NetworkMonitor.isWifiConnected(applicationContext)) {
// Wait for WiFi
return true
}
// Update task status
db.uploadTaskDao().updateStatus(task.taskId, TaskStatus.UPLOAD_QUEUED)
// Perform upload
uploadFile(task, file, serverUrl, accessToken, deviceId)
// Mark as complete
db.uploadTaskDao().updateStatus(task.taskId, TaskStatus.UPLOAD_COMPLETE)
true
} catch (e: Exception) {
android.util.Log.e(TAG, "Upload task ${task.taskId} failed", e)
handleFailure(task, UploadErrorCode.UNKNOWN)
}
}
private suspend fun uploadFile(
task: LocalAssetTaskData,
file: File,
serverUrl: URL,
accessToken: String,
deviceId: String
) {
val requestBody = createMultipartBody(task, file, deviceId)
val request = Request.Builder()
.url("${serverUrl}/api/upload")
.post(requestBody)
.header("x-immich-user-token", accessToken)
.tag(task.taskId)
.build()
client.newCall(request).execute().use { response ->
if (!response.isSuccessful) {
throw IOException("Upload failed: ${response.code}")
}
}
}
private fun createMultipartBody(
task: LocalAssetTaskData,
file: File,
deviceId: String
): RequestBody {
val boundary = "Boundary-${UUID.randomUUID()}"
return object : RequestBody() {
override fun contentType() = "multipart/form-data; boundary=$boundary".toMediaType()
override fun writeTo(sink: okio.BufferedSink) {
// Write form fields
writeFormField(sink, boundary, "deviceAssetId", task.localId)
writeFormField(sink, boundary, "deviceId", deviceId)
writeFormField(sink, boundary, "fileCreatedAt", (task.createdAt.time / 1000).toString())
writeFormField(sink, boundary, "fileModifiedAt", (task.updatedAt.time / 1000).toString())
writeFormField(sink, boundary, "fileName", task.fileName)
writeFormField(sink, boundary, "isFavorite", task.isFavorite.toString())
// Write file
sink.writeUtf8("--$boundary\r\n")
sink.writeUtf8("Content-Disposition: form-data; name=\"assetData\"; filename=\"asset\"\r\n")
sink.writeUtf8("Content-Type: application/octet-stream\r\n\r\n")
file.inputStream().use { input ->
val buffer = ByteArray(8192)
var bytesRead: Int
while (input.read(buffer).also { bytesRead = it } != -1) {
sink.write(buffer, 0, bytesRead)
// Report progress (simplified - could be enhanced with listeners)
setProgressAsync(
workDataOf(
PROGRESS_TASK_ID to task.taskId,
PROGRESS_BYTES to file.length()
)
)
}
}
sink.writeUtf8("\r\n--$boundary--\r\n")
}
private fun writeFormField(sink: okio.BufferedSink, boundary: String, name: String, value: String) {
sink.writeUtf8("--$boundary\r\n")
sink.writeUtf8("Content-Disposition: form-data; name=\"$name\"\r\n\r\n")
sink.writeUtf8(value)
sink.writeUtf8("\r\n")
}
}
}
/**
 * Records a failed upload attempt for [task] and decides its next state.
 *
 * Increments the attempt counter; once [TaskConfig.MAX_ATTEMPTS] is reached
 * the task is marked UPLOAD_FAILED and no retry time is scheduled. Otherwise
 * it stays UPLOAD_PENDING with an exponential backoff of 3^attempts seconds.
 * The outcome is persisted via the upload task DAO.
 *
 * @return always false, signalling to the caller that the upload did not succeed.
 */
private suspend fun handleFailure(task: LocalAssetTaskData, code: UploadErrorCode): Boolean {
    val attemptCount = task.attempts + 1
    val exhausted = attemptCount >= TaskConfig.MAX_ATTEMPTS
    val nextStatus = if (exhausted) TaskStatus.UPLOAD_FAILED else TaskStatus.UPLOAD_PENDING
    // Backoff applies only while retries remain; failed tasks get no retry time.
    val retryAt = if (exhausted) {
        null
    } else {
        Date(System.currentTimeMillis() + (Math.pow(3.0, attemptCount.toDouble()) * 1000).toLong())
    }
    db.uploadTaskDao().updateTaskAfterFailure(task.taskId, attemptCount, code, nextStatus, retryAt)
    return false
}
/**
 * Schedules the next UploadWorker run as unique work.
 *
 * Requires a connected network, waits one second before starting, and uses
 * ExistingWorkPolicy.KEEP so a run already queued under [UPLOAD_WORK_NAME]
 * is not duplicated.
 */
private fun enqueueNextBatch() {
    val request = OneTimeWorkRequestBuilder<UploadWorker>()
        .setConstraints(
            Constraints.Builder()
                .setRequiredNetworkType(NetworkType.CONNECTED)
                .build()
        )
        .addTag(UPLOAD_WORK_TAG)
        .setInitialDelay(1, TimeUnit.SECONDS)
        .build()
    WorkManager.getInstance(applicationContext)
        .enqueueUniqueWork(UPLOAD_WORK_NAME, ExistingWorkPolicy.KEEP, request)
}
/**
 * Creates the OkHttp client used for uploads: a 30 s connect timeout and
 * generous 5-minute read/write timeouts to accommodate large asset transfers.
 */
private fun createOkHttpClient(): OkHttpClient =
    OkHttpClient.Builder()
        .connectTimeout(30, TimeUnit.SECONDS)
        .readTimeout(300, TimeUnit.SECONDS)
        .writeTimeout(300, TimeUnit.SECONDS)
        .build()
companion object {
    // Log tag for this worker.
    private const val TAG = "UploadWorker"
    // WorkManager tag attached to each upload request (used for querying/cancelling).
    private const val UPLOAD_WORK_TAG = "immich_upload"
    // Unique-work name ensuring only one upload chain is enqueued at a time.
    private const val UPLOAD_WORK_NAME = "immich_upload_unique"
    // Progress Data keys published via setProgressAsync: the task being uploaded
    // and the number of bytes transferred so far.
    const val PROGRESS_TASK_ID = "progress_task_id"
    const val PROGRESS_BYTES = "progress_bytes"
}
}

View File

@@ -36,3 +36,4 @@ tasks.register("clean", Delete) {
tasks.named('wrapper') {
distributionType = Wrapper.DistributionType.ALL
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -32,6 +32,7 @@ target 'Runner' do
use_modular_headers!
flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))
# share_handler addition start
target 'ShareExtension' do
inherit! :search_paths

View File

@@ -88,9 +88,9 @@ PODS:
- Flutter
- FlutterMacOS
- SAMKeychain (1.5.3)
- SDWebImage (5.21.3):
- SDWebImage/Core (= 5.21.3)
- SDWebImage/Core (5.21.3)
- SDWebImage (5.21.0):
- SDWebImage/Core (= 5.21.0)
- SDWebImage/Core (5.21.0)
- share_handler_ios (0.0.14):
- Flutter
- share_handler_ios/share_handler_ios_models (= 0.0.14)
@@ -107,16 +107,16 @@ PODS:
- sqflite_darwin (0.0.4):
- Flutter
- FlutterMacOS
- sqlite3 (3.49.2):
- sqlite3/common (= 3.49.2)
- sqlite3/common (3.49.2)
- sqlite3/dbstatvtab (3.49.2):
- sqlite3 (3.49.1):
- sqlite3/common (= 3.49.1)
- sqlite3/common (3.49.1)
- sqlite3/dbstatvtab (3.49.1):
- sqlite3/common
- sqlite3/fts5 (3.49.2):
- sqlite3/fts5 (3.49.1):
- sqlite3/common
- sqlite3/perf-threadsafe (3.49.2):
- sqlite3/perf-threadsafe (3.49.1):
- sqlite3/common
- sqlite3/rtree (3.49.2):
- sqlite3/rtree (3.49.1):
- sqlite3/common
- sqlite3_flutter_libs (0.0.1):
- Flutter
@@ -275,18 +275,18 @@ SPEC CHECKSUMS:
permission_handler_apple: 4ed2196e43d0651e8ff7ca3483a069d469701f2d
photo_manager: 1d80ae07a89a67dfbcae95953a1e5a24af7c3e62
SAMKeychain: 483e1c9f32984d50ca961e26818a534283b4cd5c
SDWebImage: 16309af6d214ba3f77a7c6f6fdda888cb313a50a
SDWebImage: f84b0feeb08d2d11e6a9b843cb06d75ebf5b8868
share_handler_ios: e2244e990f826b2c8eaa291ac3831569438ba0fb
share_handler_ios_models: fc638c9b4330dc7f082586c92aee9dfa0b87b871
share_plus: 50da8cb520a8f0f65671c6c6a99b3617ed10a58a
shared_preferences_foundation: 9e1978ff2562383bd5676f64ec4e9aa8fa06a6f7
sqflite_darwin: 20b2a3a3b70e43edae938624ce550a3cbf66a3d0
sqlite3: 3c950dc86011117c307eb0b28c4a7bb449dce9f1
sqlite3: fc1400008a9b3525f5914ed715a5d1af0b8f4983
sqlite3_flutter_libs: f8fc13346870e73fe35ebf6dbb997fbcd156b241
SwiftyGif: 706c60cf65fa2bc5ee0313beece843c8eb8194d4
url_launcher_ios: 694010445543906933d732453a59da0a173ae33d
wakelock_plus: e29112ab3ef0b318e58cfa5c32326458be66b556
PODFILE CHECKSUM: 95621706d175fee669455a5946a602e2a775019c
PODFILE CHECKSUM: 7ce312f2beab01395db96f6969d90a447279cf45
COCOAPODS: 1.16.2

View File

@@ -29,13 +29,9 @@
FAC6F89B2D287C890078CB2F /* ShareExtension.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = FAC6F8902D287C890078CB2F /* ShareExtension.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
FAC6F8B72D287F120078CB2F /* ShareViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = FAC6F8B52D287F120078CB2F /* ShareViewController.swift */; };
FAC6F8B92D287F120078CB2F /* MainInterface.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = FAC6F8B32D287F120078CB2F /* MainInterface.storyboard */; };
FE30A0D02ECF97B8007AFDD7 /* Algorithms in Frameworks */ = {isa = PBXBuildFile; productRef = FE30A0CF2ECF97B8007AFDD7 /* Algorithms */; };
FEAFA8732E4D42F4001E47FE /* Thumbhash.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEAFA8722E4D42F4001E47FE /* Thumbhash.swift */; };
FED3B1962E253E9B0030FD97 /* ThumbnailsImpl.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1942E253E9B0030FD97 /* ThumbnailsImpl.swift */; };
FED3B1972E253E9B0030FD97 /* Thumbnails.g.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1932E253E9B0030FD97 /* Thumbnails.g.swift */; };
FEE084F82EC172460045228E /* SQLiteData in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084F72EC172460045228E /* SQLiteData */; };
FEE084FB2EC1725A0045228E /* RawStructuredFieldValues in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084FA2EC1725A0045228E /* RawStructuredFieldValues */; };
FEE084FD2EC1725A0045228E /* StructuredFieldValues in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084FC2EC1725A0045228E /* StructuredFieldValues */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@@ -78,16 +74,6 @@
name = "Embed Foundation Extensions";
runOnlyForDeploymentPostprocessing = 0;
};
FE4C52462EAFE736009EEB47 /* Embed ExtensionKit Extensions */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "$(EXTENSIONS_FOLDER_PATH)";
dstSubfolderSpec = 16;
files = (
);
name = "Embed ExtensionKit Extensions";
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
@@ -147,11 +133,15 @@
/* Begin PBXFileSystemSynchronizedRootGroup section */
B231F52D2E93A44A00BC45D1 /* Core */ = {
isa = PBXFileSystemSynchronizedRootGroup;
exceptions = (
);
path = Core;
sourceTree = "<group>";
};
B2CF7F8C2DDE4EBB00744BF6 /* Sync */ = {
isa = PBXFileSystemSynchronizedRootGroup;
exceptions = (
);
path = Sync;
sourceTree = "<group>";
};
@@ -163,21 +153,6 @@
path = WidgetExtension;
sourceTree = "<group>";
};
FE14355D2EC446E90009D5AC /* Upload */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = Upload;
sourceTree = "<group>";
};
FEB3BA112EBD52860081A5EB /* Schemas */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = Schemas;
sourceTree = "<group>";
};
FEE084F22EC172080045228E /* Schemas */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = Schemas;
sourceTree = "<group>";
};
/* End PBXFileSystemSynchronizedRootGroup section */
/* Begin PBXFrameworksBuildPhase section */
@@ -185,10 +160,6 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
FEE084F82EC172460045228E /* SQLiteData in Frameworks */,
FEE084FB2EC1725A0045228E /* RawStructuredFieldValues in Frameworks */,
FEE084FD2EC1725A0045228E /* StructuredFieldValues in Frameworks */,
FE30A0D02ECF97B8007AFDD7 /* Algorithms in Frameworks */,
D218389C4A4C4693F141F7D1 /* Pods_Runner.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
@@ -283,9 +254,6 @@
97C146F01CF9000F007C117D /* Runner */ = {
isa = PBXGroup;
children = (
FE14355D2EC446E90009D5AC /* Upload */,
FEE084F22EC172080045228E /* Schemas */,
FEB3BA112EBD52860081A5EB /* Schemas */,
B231F52D2E93A44A00BC45D1 /* Core */,
B25D37792E72CA15008B6CA7 /* Connectivity */,
B21E34A62E5AF9760031FDB9 /* Background */,
@@ -363,7 +331,6 @@
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
D218A34AEE62BC1EF119F5B0 /* [CP] Embed Pods Frameworks */,
6724EEB7D74949FA08581154 /* [CP] Copy Pods Resources */,
FE4C52462EAFE736009EEB47 /* Embed ExtensionKit Extensions */,
);
buildRules = (
);
@@ -374,9 +341,6 @@
fileSystemSynchronizedGroups = (
B231F52D2E93A44A00BC45D1 /* Core */,
B2CF7F8C2DDE4EBB00744BF6 /* Sync */,
FE14355D2EC446E90009D5AC /* Upload */,
FEB3BA112EBD52860081A5EB /* Schemas */,
FEE084F22EC172080045228E /* Schemas */,
);
name = Runner;
productName = Runner;
@@ -428,7 +392,7 @@
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = YES;
LastSwiftUpdateCheck = 1620;
LastSwiftUpdateCheck = 1640;
LastUpgradeCheck = 1510;
ORGANIZATIONNAME = "";
TargetAttributes = {
@@ -455,11 +419,6 @@
Base,
);
mainGroup = 97C146E51CF9000F007C117D;
packageReferences = (
FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */,
FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */,
FE30A0CE2ECF97B8007AFDD7 /* XCRemoteSwiftPackageReference "swift-algorithms" */,
);
preferredProjectObjectVersion = 77;
productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
projectDirPath = "";
@@ -571,14 +530,10 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Copy Pods Resources";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";
@@ -607,14 +562,10 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Embed Pods Frameworks";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
@@ -765,7 +716,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 33MF3D8ZGA;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 14.0;
@@ -774,8 +725,7 @@
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.121.0;
OTHER_SWIFT_FLAGS = "$(inherited) -D COCOAPODS -D DEBUG -Xllvm -sil-disable-pass=performance-linker";
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.profile;
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.profile;
PRODUCT_NAME = "Immich-Profile";
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
@@ -910,7 +860,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 33MF3D8ZGA;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 14.0;
@@ -919,8 +869,7 @@
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.121.0;
OTHER_SWIFT_FLAGS = "$(inherited) -D COCOAPODS -D DEBUG -Xllvm -sil-disable-pass=performance-linker";
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.vdebug;
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.vdebug;
PRODUCT_NAME = "Immich-Debug";
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
@@ -941,7 +890,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 33MF3D8ZGA;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 14.0;
@@ -950,8 +899,7 @@
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.121.0;
OTHER_SWIFT_FLAGS = "$(inherited) -D COCOAPODS -Xllvm -sil-disable-pass=performance-linker";
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich;
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich;
PRODUCT_NAME = Immich;
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
@@ -975,7 +923,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 233;
DEVELOPMENT_TEAM = 33MF3D8ZGA;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -992,7 +940,7 @@
MARKETING_VERSION = 1.0;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.vdebug.Widget;
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.vdebug.Widget;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
@@ -1018,7 +966,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 233;
DEVELOPMENT_TEAM = 33MF3D8ZGA;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -1034,7 +982,7 @@
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 1.0;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.Widget;
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.Widget;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_EMIT_LOC_STRINGS = YES;
@@ -1058,7 +1006,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 233;
DEVELOPMENT_TEAM = 33MF3D8ZGA;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -1074,7 +1022,7 @@
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 1.0;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.profile.Widget;
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.profile.Widget;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_EMIT_LOC_STRINGS = YES;
@@ -1098,7 +1046,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 33MF3D8ZGA;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -1115,7 +1063,7 @@
MARKETING_VERSION = 1.0;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.vdebug.ShareExtension;
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.vdebug.ShareExtension;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SKIP_INSTALL = YES;
@@ -1142,7 +1090,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 33MF3D8ZGA;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -1158,7 +1106,7 @@
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 1.0;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.ShareExtension;
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.ShareExtension;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SKIP_INSTALL = YES;
@@ -1183,7 +1131,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 233;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 33MF3D8ZGA;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -1199,7 +1147,7 @@
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 1.0;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = app.mertakev.immich.profile.ShareExtension;
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.profile.ShareExtension;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SKIP_INSTALL = YES;
@@ -1253,56 +1201,6 @@
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
/* Begin XCRemoteSwiftPackageReference section */
FE30A0CE2ECF97B8007AFDD7 /* XCRemoteSwiftPackageReference "swift-algorithms" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/apple/swift-algorithms.git";
requirement = {
kind = upToNextMajorVersion;
minimumVersion = 1.2.1;
};
};
FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/pointfreeco/sqlite-data";
requirement = {
kind = upToNextMajorVersion;
minimumVersion = 1.3.0;
};
};
FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/apple/swift-http-structured-headers.git";
requirement = {
kind = upToNextMajorVersion;
minimumVersion = 1.5.0;
};
};
/* End XCRemoteSwiftPackageReference section */
/* Begin XCSwiftPackageProductDependency section */
FE30A0CF2ECF97B8007AFDD7 /* Algorithms */ = {
isa = XCSwiftPackageProductDependency;
package = FE30A0CE2ECF97B8007AFDD7 /* XCRemoteSwiftPackageReference "swift-algorithms" */;
productName = Algorithms;
};
FEE084F72EC172460045228E /* SQLiteData */ = {
isa = XCSwiftPackageProductDependency;
package = FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */;
productName = SQLiteData;
};
FEE084FA2EC1725A0045228E /* RawStructuredFieldValues */ = {
isa = XCSwiftPackageProductDependency;
package = FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */;
productName = RawStructuredFieldValues;
};
FEE084FC2EC1725A0045228E /* StructuredFieldValues */ = {
isa = XCSwiftPackageProductDependency;
package = FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */;
productName = StructuredFieldValues;
};
/* End XCSwiftPackageProductDependency section */
};
rootObject = 97C146E61CF9000F007C117D /* Project object */;
}

View File

@@ -1,177 +0,0 @@
{
"originHash" : "9be33bfaa68721646604aefff3cabbdaf9a193da192aae024c265065671f6c49",
"pins" : [
{
"identity" : "combine-schedulers",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/combine-schedulers",
"state" : {
"revision" : "fd16d76fd8b9a976d88bfb6cacc05ca8d19c91b6",
"version" : "1.1.0"
}
},
{
"identity" : "grdb.swift",
"kind" : "remoteSourceControl",
"location" : "https://github.com/groue/GRDB.swift",
"state" : {
"revision" : "18497b68fdbb3a09528d260a0a0e1e7e61c8c53d",
"version" : "7.8.0"
}
},
{
"identity" : "opencombine",
"kind" : "remoteSourceControl",
"location" : "https://github.com/OpenCombine/OpenCombine.git",
"state" : {
"revision" : "8576f0d579b27020beccbccc3ea6844f3ddfc2c2",
"version" : "0.14.0"
}
},
{
"identity" : "sqlite-data",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/sqlite-data",
"state" : {
"revision" : "b66b894b9a5710f1072c8eb6448a7edfc2d743d9",
"version" : "1.3.0"
}
},
{
"identity" : "swift-case-paths",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-case-paths",
"state" : {
"revision" : "6989976265be3f8d2b5802c722f9ba168e227c71",
"version" : "1.7.2"
}
},
{
"identity" : "swift-clocks",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-clocks",
"state" : {
"revision" : "cc46202b53476d64e824e0b6612da09d84ffde8e",
"version" : "1.0.6"
}
},
{
"identity" : "swift-collections",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-collections",
"state" : {
"revision" : "7b847a3b7008b2dc2f47ca3110d8c782fb2e5c7e",
"version" : "1.3.0"
}
},
{
"identity" : "swift-concurrency-extras",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-concurrency-extras",
"state" : {
"revision" : "5a3825302b1a0d744183200915a47b508c828e6f",
"version" : "1.3.2"
}
},
{
"identity" : "swift-custom-dump",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-custom-dump",
"state" : {
"revision" : "82645ec760917961cfa08c9c0c7104a57a0fa4b1",
"version" : "1.3.3"
}
},
{
"identity" : "swift-dependencies",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-dependencies",
"state" : {
"revision" : "a10f9feeb214bc72b5337b6ef6d5a029360db4cc",
"version" : "1.10.0"
}
},
{
"identity" : "swift-http-structured-headers",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-http-structured-headers.git",
"state" : {
"revision" : "a9f3c352f4d46afd155e00b3c6e85decae6bcbeb",
"version" : "1.5.0"
}
},
{
"identity" : "swift-identified-collections",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-identified-collections",
"state" : {
"revision" : "322d9ffeeba85c9f7c4984b39422ec7cc3c56597",
"version" : "1.1.1"
}
},
{
"identity" : "swift-perception",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-perception",
"state" : {
"revision" : "4f47ebafed5f0b0172cf5c661454fa8e28fb2ac4",
"version" : "2.0.9"
}
},
{
"identity" : "swift-sharing",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-sharing",
"state" : {
"revision" : "3bfc408cc2d0bee2287c174da6b1c76768377818",
"version" : "2.7.4"
}
},
{
"identity" : "swift-snapshot-testing",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-snapshot-testing",
"state" : {
"revision" : "a8b7c5e0ed33d8ab8887d1654d9b59f2cbad529b",
"version" : "1.18.7"
}
},
{
"identity" : "swift-structured-queries",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-structured-queries",
"state" : {
"revision" : "9c84335373bae5f5c9f7b5f0adf3ae10f2cab5b9",
"version" : "0.25.2"
}
},
{
"identity" : "swift-syntax",
"kind" : "remoteSourceControl",
"location" : "https://github.com/swiftlang/swift-syntax",
"state" : {
"revision" : "4799286537280063c85a32f09884cfbca301b1a1",
"version" : "602.0.0"
}
},
{
"identity" : "swift-tagged",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-tagged",
"state" : {
"revision" : "3907a9438f5b57d317001dc99f3f11b46882272b",
"version" : "0.10.0"
}
},
{
"identity" : "xctest-dynamic-overlay",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/xctest-dynamic-overlay",
"state" : {
"revision" : "4c27acf5394b645b70d8ba19dc249c0472d5f618",
"version" : "1.7.0"
}
}
],
"version" : 3
}

View File

@@ -1,150 +0,0 @@
{
"originHash" : "9be33bfaa68721646604aefff3cabbdaf9a193da192aae024c265065671f6c49",
"pins" : [
{
"identity" : "combine-schedulers",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/combine-schedulers",
"state" : {
"revision" : "5928286acce13def418ec36d05a001a9641086f2",
"version" : "1.0.3"
}
},
{
"identity" : "grdb.swift",
"kind" : "remoteSourceControl",
"location" : "https://github.com/groue/GRDB.swift",
"state" : {
"revision" : "18497b68fdbb3a09528d260a0a0e1e7e61c8c53d",
"version" : "7.8.0"
}
},
{
"identity" : "sqlite-data",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/sqlite-data",
"state" : {
"revision" : "b66b894b9a5710f1072c8eb6448a7edfc2d743d9",
"version" : "1.3.0"
}
},
{
"identity" : "swift-clocks",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-clocks",
"state" : {
"revision" : "cc46202b53476d64e824e0b6612da09d84ffde8e",
"version" : "1.0.6"
}
},
{
"identity" : "swift-collections",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-collections",
"state" : {
"revision" : "7b847a3b7008b2dc2f47ca3110d8c782fb2e5c7e",
"version" : "1.3.0"
}
},
{
"identity" : "swift-concurrency-extras",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-concurrency-extras",
"state" : {
"revision" : "5a3825302b1a0d744183200915a47b508c828e6f",
"version" : "1.3.2"
}
},
{
"identity" : "swift-custom-dump",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-custom-dump",
"state" : {
"revision" : "82645ec760917961cfa08c9c0c7104a57a0fa4b1",
"version" : "1.3.3"
}
},
{
"identity" : "swift-dependencies",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-dependencies",
"state" : {
"revision" : "a10f9feeb214bc72b5337b6ef6d5a029360db4cc",
"version" : "1.10.0"
}
},
{
"identity" : "swift-http-structured-headers",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-http-structured-headers.git",
"state" : {
"revision" : "a9f3c352f4d46afd155e00b3c6e85decae6bcbeb",
"version" : "1.5.0"
}
},
{
"identity" : "swift-identified-collections",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-identified-collections",
"state" : {
"revision" : "322d9ffeeba85c9f7c4984b39422ec7cc3c56597",
"version" : "1.1.1"
}
},
{
"identity" : "swift-perception",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-perception",
"state" : {
"revision" : "4f47ebafed5f0b0172cf5c661454fa8e28fb2ac4",
"version" : "2.0.9"
}
},
{
"identity" : "swift-sharing",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-sharing",
"state" : {
"revision" : "3bfc408cc2d0bee2287c174da6b1c76768377818",
"version" : "2.7.4"
}
},
{
"identity" : "swift-snapshot-testing",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-snapshot-testing",
"state" : {
"revision" : "a8b7c5e0ed33d8ab8887d1654d9b59f2cbad529b",
"version" : "1.18.7"
}
},
{
"identity" : "swift-structured-queries",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-structured-queries",
"state" : {
"revision" : "9c84335373bae5f5c9f7b5f0adf3ae10f2cab5b9",
"version" : "0.25.2"
}
},
{
"identity" : "swift-syntax",
"kind" : "remoteSourceControl",
"location" : "https://github.com/swiftlang/swift-syntax",
"state" : {
"revision" : "4799286537280063c85a32f09884cfbca301b1a1",
"version" : "602.0.0"
}
},
{
"identity" : "xctest-dynamic-overlay",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/xctest-dynamic-overlay",
"state" : {
"revision" : "4c27acf5394b645b70d8ba19dc249c0472d5f618",
"version" : "1.7.0"
}
}
],
"version" : 3
}

View File

@@ -1,11 +1,11 @@
import BackgroundTasks
import Flutter
import UIKit
import network_info_plus
import path_provider_foundation
import permission_handler_apple
import photo_manager
import shared_preferences_foundation
import UIKit
@main
@objc class AppDelegate: FlutterAppDelegate {
@@ -15,7 +15,7 @@ import shared_preferences_foundation
) -> Bool {
// Required for flutter_local_notification
if #available(iOS 10.0, *) {
UNUserNotificationCenter.current().delegate = self
UNUserNotificationCenter.current().delegate = self as? UNUserNotificationCenterDelegate
}
GeneratedPluginRegistrant.register(with: self)
@@ -36,9 +36,7 @@ import shared_preferences_foundation
}
if !registry.hasPlugin("org.cocoapods.shared-preferences-foundation") {
SharedPreferencesPlugin.register(
with: registry.registrar(forPlugin: "org.cocoapods.shared-preferences-foundation")!
)
SharedPreferencesPlugin.register(with: registry.registrar(forPlugin: "org.cocoapods.shared-preferences-foundation")!)
}
if !registry.hasPlugin("org.cocoapods.permission-handler-apple") {
@@ -52,18 +50,14 @@ import shared_preferences_foundation
return super.application(application, didFinishLaunchingWithOptions: launchOptions)
}
public static func registerPlugins(with engine: FlutterEngine) {
NativeSyncApiImpl.register(with: engine.registrar(forPlugin: NativeSyncApiImpl.name)!)
ThumbnailApiSetup.setUp(binaryMessenger: engine.binaryMessenger, api: ThumbnailApiImpl())
BackgroundWorkerFgHostApiSetup.setUp(binaryMessenger: engine.binaryMessenger, api: BackgroundWorkerApiImpl())
let statusListener = StatusEventListener()
StreamStatusStreamHandler.register(with: engine.binaryMessenger, streamHandler: statusListener)
let progressListener = ProgressEventListener()
StreamProgressStreamHandler.register(with: engine.binaryMessenger, streamHandler: progressListener)
UploadApiSetup.setUp(
binaryMessenger: engine.binaryMessenger,
api: UploadApiImpl(statusListener: statusListener, progressListener: progressListener)
)
}
public static func cancelPlugins(with engine: FlutterEngine) {
(engine.valuePublished(byPlugin: NativeSyncApiImpl.name) as? NativeSyncApiImpl)?.detachFromEngine()
}
}

View File

@@ -350,12 +350,16 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
// If we have required Wi-Fi, we can check the isExpensive property
let requireWifi = defaults.value(forKey: "require_wifi") as? Bool ?? false
// The network is expensive and we have required Wi-Fi
// Therefore, we will simply complete the task without
// running it
if (requireWifi && NetworkMonitor.shared.isExpensive) {
return task.setTaskCompleted(success: true)
if (requireWifi) {
let wifiMonitor = NWPathMonitor(requiredInterfaceType: .wifi)
let isExpensive = wifiMonitor.currentPath.isExpensive
if (isExpensive) {
// The network is expensive and we have required Wi-Fi
// Therefore, we will simply complete the task without
// running it
task.setTaskCompleted(success: true)
return
}
}
// Schedule the next sync task so we can run this again later

View File

@@ -1,24 +1,17 @@
class ImmichPlugin: NSObject {
var detached: Bool
override init() {
detached = false
super.init()
}
func detachFromEngine() {
self.detached = true
}
func completeWhenActive<T>(for completion: @escaping (T) -> Void, with value: T) {
guard !self.detached else { return }
completion(value)
}
}
@inline(__always)
func dPrint(_ item: Any) {
#if DEBUG
print(item)
#endif
}

View File

@@ -9,6 +9,8 @@
<key>com.apple.developer.networking.wifi-info</key>
<true/>
<key>com.apple.security.application-groups</key>
<array/>
<array>
<string>group.app.immich.share</string>
</array>
</dict>
</plist>

View File

@@ -11,6 +11,8 @@
<key>com.apple.developer.networking.wifi-info</key>
<true/>
<key>com.apple.security.application-groups</key>
<array/>
<array>
<string>group.app.immich.share</string>
</array>
</dict>
</plist>

View File

@@ -1,272 +0,0 @@
import SQLiteData
extension Notification.Name {
static let networkDidConnect = Notification.Name("networkDidConnect")
}
enum TaskConfig {
static let maxActiveDownloads = 3
static let maxPendingDownloads = 50
static let maxPendingUploads = 50
static let maxRetries = 10
static let sessionId = "app.mertalev.immich.upload"
static let downloadCheckIntervalNs: UInt64 = 30_000_000_000 // 30 seconds
static let downloadTimeoutS = TimeInterval(60)
static let transferSpeedAlpha = 0.4
static let originalsDir = FileManager.default.temporaryDirectory.appendingPathComponent(
"originals",
isDirectory: true
)
}
/// Keys of the persisted key-value store (`store_entity`).
///
/// Raw values are stable row ids written to the database — never renumber or
/// reuse an existing case. Each private `_case` is paired with a `Typed<T>`
/// constant so `Store.get`/`Store.set` can enforce the value type at compile time.
/// Cases are grouped by the Swift type of the stored value, not by raw value.
enum StoreKey: Int, CaseIterable, QueryBindable {
// MARK: - Int
case _version = 0
static let version = Typed<Int>(rawValue: ._version)
case _deviceIdHash = 3
static let deviceIdHash = Typed<Int>(rawValue: ._deviceIdHash)
case _backupTriggerDelay = 8
static let backupTriggerDelay = Typed<Int>(rawValue: ._backupTriggerDelay)
case _tilesPerRow = 103
static let tilesPerRow = Typed<Int>(rawValue: ._tilesPerRow)
case _groupAssetsBy = 105
static let groupAssetsBy = Typed<Int>(rawValue: ._groupAssetsBy)
case _uploadErrorNotificationGracePeriod = 106
static let uploadErrorNotificationGracePeriod = Typed<Int>(rawValue: ._uploadErrorNotificationGracePeriod)
case _thumbnailCacheSize = 110
static let thumbnailCacheSize = Typed<Int>(rawValue: ._thumbnailCacheSize)
case _imageCacheSize = 111
static let imageCacheSize = Typed<Int>(rawValue: ._imageCacheSize)
case _albumThumbnailCacheSize = 112
static let albumThumbnailCacheSize = Typed<Int>(rawValue: ._albumThumbnailCacheSize)
case _selectedAlbumSortOrder = 113
static let selectedAlbumSortOrder = Typed<Int>(rawValue: ._selectedAlbumSortOrder)
case _logLevel = 115
static let logLevel = Typed<Int>(rawValue: ._logLevel)
case _mapRelativeDate = 119
static let mapRelativeDate = Typed<Int>(rawValue: ._mapRelativeDate)
case _mapThemeMode = 124
static let mapThemeMode = Typed<Int>(rawValue: ._mapThemeMode)
// MARK: - String
case _assetETag = 1
static let assetETag = Typed<String>(rawValue: ._assetETag)
case _currentUser = 2
static let currentUser = Typed<String>(rawValue: ._currentUser)
case _deviceId = 4
static let deviceId = Typed<String>(rawValue: ._deviceId)
case _accessToken = 11
static let accessToken = Typed<String>(rawValue: ._accessToken)
case _sslClientCertData = 15
static let sslClientCertData = Typed<String>(rawValue: ._sslClientCertData)
case _sslClientPasswd = 16
static let sslClientPasswd = Typed<String>(rawValue: ._sslClientPasswd)
case _themeMode = 102
static let themeMode = Typed<String>(rawValue: ._themeMode)
// Note: stored as a JSON string; typed as a dictionary via the Codable conformance.
case _customHeaders = 127
static let customHeaders = Typed<[String: String]>(rawValue: ._customHeaders)
case _primaryColor = 128
static let primaryColor = Typed<String>(rawValue: ._primaryColor)
case _preferredWifiName = 133
static let preferredWifiName = Typed<String>(rawValue: ._preferredWifiName)
// MARK: - Endpoint
case _externalEndpointList = 135
static let externalEndpointList = Typed<[Endpoint]>(rawValue: ._externalEndpointList)
// MARK: - URL
case _serverUrl = 10
static let serverUrl = Typed<URL>(rawValue: ._serverUrl)
case _serverEndpoint = 12
static let serverEndpoint = Typed<URL>(rawValue: ._serverEndpoint)
case _localEndpoint = 134
static let localEndpoint = Typed<URL>(rawValue: ._localEndpoint)
// MARK: - Date
case _backupFailedSince = 5
static let backupFailedSince = Typed<Date>(rawValue: ._backupFailedSince)
// MARK: - Bool
case _backupRequireWifi = 6
static let backupRequireWifi = Typed<Bool>(rawValue: ._backupRequireWifi)
case _backupRequireCharging = 7
static let backupRequireCharging = Typed<Bool>(rawValue: ._backupRequireCharging)
case _autoBackup = 13
static let autoBackup = Typed<Bool>(rawValue: ._autoBackup)
case _backgroundBackup = 14
static let backgroundBackup = Typed<Bool>(rawValue: ._backgroundBackup)
case _loadPreview = 100
static let loadPreview = Typed<Bool>(rawValue: ._loadPreview)
case _loadOriginal = 101
static let loadOriginal = Typed<Bool>(rawValue: ._loadOriginal)
case _dynamicLayout = 104
static let dynamicLayout = Typed<Bool>(rawValue: ._dynamicLayout)
case _backgroundBackupTotalProgress = 107
static let backgroundBackupTotalProgress = Typed<Bool>(rawValue: ._backgroundBackupTotalProgress)
case _backgroundBackupSingleProgress = 108
static let backgroundBackupSingleProgress = Typed<Bool>(rawValue: ._backgroundBackupSingleProgress)
case _storageIndicator = 109
static let storageIndicator = Typed<Bool>(rawValue: ._storageIndicator)
case _advancedTroubleshooting = 114
static let advancedTroubleshooting = Typed<Bool>(rawValue: ._advancedTroubleshooting)
case _preferRemoteImage = 116
static let preferRemoteImage = Typed<Bool>(rawValue: ._preferRemoteImage)
case _loopVideo = 117
static let loopVideo = Typed<Bool>(rawValue: ._loopVideo)
case _mapShowFavoriteOnly = 118
static let mapShowFavoriteOnly = Typed<Bool>(rawValue: ._mapShowFavoriteOnly)
case _selfSignedCert = 120
static let selfSignedCert = Typed<Bool>(rawValue: ._selfSignedCert)
case _mapIncludeArchived = 121
static let mapIncludeArchived = Typed<Bool>(rawValue: ._mapIncludeArchived)
case _ignoreIcloudAssets = 122
static let ignoreIcloudAssets = Typed<Bool>(rawValue: ._ignoreIcloudAssets)
case _selectedAlbumSortReverse = 123
static let selectedAlbumSortReverse = Typed<Bool>(rawValue: ._selectedAlbumSortReverse)
case _mapwithPartners = 125
static let mapwithPartners = Typed<Bool>(rawValue: ._mapwithPartners)
case _enableHapticFeedback = 126
static let enableHapticFeedback = Typed<Bool>(rawValue: ._enableHapticFeedback)
case _dynamicTheme = 129
static let dynamicTheme = Typed<Bool>(rawValue: ._dynamicTheme)
case _colorfulInterface = 130
static let colorfulInterface = Typed<Bool>(rawValue: ._colorfulInterface)
case _syncAlbums = 131
static let syncAlbums = Typed<Bool>(rawValue: ._syncAlbums)
case _autoEndpointSwitching = 132
static let autoEndpointSwitching = Typed<Bool>(rawValue: ._autoEndpointSwitching)
case _loadOriginalVideo = 136
static let loadOriginalVideo = Typed<Bool>(rawValue: ._loadOriginalVideo)
case _manageLocalMediaAndroid = 137
static let manageLocalMediaAndroid = Typed<Bool>(rawValue: ._manageLocalMediaAndroid)
case _readonlyModeEnabled = 138
static let readonlyModeEnabled = Typed<Bool>(rawValue: ._readonlyModeEnabled)
case _autoPlayVideo = 139
static let autoPlayVideo = Typed<Bool>(rawValue: ._autoPlayVideo)
// Raw values >= 1000 appear reserved for beta/migration flags — keep new cases consistent.
case _photoManagerCustomFilter = 1000
static let photoManagerCustomFilter = Typed<Bool>(rawValue: ._photoManagerCustomFilter)
case _betaPromptShown = 1001
static let betaPromptShown = Typed<Bool>(rawValue: ._betaPromptShown)
case _betaTimeline = 1002
static let betaTimeline = Typed<Bool>(rawValue: ._betaTimeline)
case _enableBackup = 1003
static let enableBackup = Typed<Bool>(rawValue: ._enableBackup)
case _useWifiForUploadVideos = 1004
static let useWifiForUploadVideos = Typed<Bool>(rawValue: ._useWifiForUploadVideos)
case _useWifiForUploadPhotos = 1005
static let useWifiForUploadPhotos = Typed<Bool>(rawValue: ._useWifiForUploadPhotos)
case _needBetaMigration = 1006
static let needBetaMigration = Typed<Bool>(rawValue: ._needBetaMigration)
case _shouldResetSync = 1007
static let shouldResetSync = Typed<Bool>(rawValue: ._shouldResetSync)
/// Phantom-typed wrapper that pairs a raw key with the Swift type of its value,
/// letting the store APIs be generic over `T` while persisting only the raw key.
struct Typed<T>: RawRepresentable {
let rawValue: StoreKey
@_transparent
init(rawValue value: StoreKey) {
self.rawValue = value
}
}
}
/// Custom HTTP headers attached to upload requests.
enum UploadHeaders: String {
case reprDigest = "Repr-Digest"
case userToken = "X-Immich-User-Token"
case assetData = "X-Immich-Asset-Data"
}
/// Lifecycle states of an upload task; persisted as Int, so case order is part of the schema.
enum TaskStatus: Int, QueryBindable {
case downloadPending, downloadQueued, downloadFailed, uploadPending, uploadQueued, uploadFailed, uploadComplete,
uploadSkipped
}
/// Whether a local album is included in, ignored by, or excluded from backup.
enum BackupSelection: Int, QueryBindable {
case selected, none, excluded
}
/// Avatar colors; persisted as Int, so case order is part of the schema.
enum AvatarColor: Int, QueryBindable {
case primary, pink, red, yellow, blue, green, purple, orange, gray, amber
}
/// Role of a user within a shared album.
enum AlbumUserRole: Int, QueryBindable {
case editor, viewer
}
enum MemoryType: Int, QueryBindable {
case onThisDay
}
/// Visibility of a remote asset in the timeline.
enum AssetVisibility: Int, QueryBindable {
case timeline, hidden, archive, locked
}
/// Origin of a detected face; raw strings mirror the server's values.
enum SourceType: String, QueryBindable {
case machineLearning = "machine-learning"
case exif, manual
}
/// Upload transport: single multipart POST vs. resumable protocol.
enum UploadMethod: Int, QueryBindable {
case multipart, resumable
}
/// Errors thrown while preparing an asset for upload; wraps the persisted code.
enum UploadError: Error {
case fileCreationFailed
case iCloudError(UploadErrorCode)
case photosError(UploadErrorCode)
}
/// Persisted failure codes for upload tasks; append new cases only (Int raw values are schema).
enum UploadErrorCode: Int, QueryBindable {
case unknown
case assetNotFound
case fileNotFound
case resourceNotFound
case invalidResource
case encodingFailed
case writeFailed
case notEnoughSpace
case networkError
case photosInternalError
case photosUnknownError
case noServerUrl
case noDeviceId
case noAccessToken
case interrupted
case cancelled
case downloadStalled
case forceQuit
case outOfResources
case backgroundUpdatesDisabled
case uploadTimeout
case iCloudRateLimit
case iCloudThrottled
case invalidResponse
case badRequest
case internalServerError
}
enum AssetType: Int, QueryBindable {
case other, image, video, audio
}
/// Server's verdict for an uploaded asset (JSON `status` field).
enum AssetMediaStatus: String, Codable {
case created, replaced, duplicate
}
/// A candidate server endpoint plus its last known reachability.
struct Endpoint: Codable {
let url: URL
let status: Status
enum Status: String, Codable {
case loading, valid, error, unknown
}
}
/// Successful upload response body.
struct UploadSuccessResponse: Codable {
let status: AssetMediaStatus
let id: String
}
/// Error upload response body.
struct UploadErrorResponse: Codable {
let message: String
}

View File

@@ -1,160 +0,0 @@
import SQLiteData
/// Failures raised while converting between a store value and its persisted form.
enum StoreError: Error {
case invalidJSON(String)
case invalidURL(String)
case encodingFailed
case notFound
}
/// A value type that can round-trip through the key-value store.
///
/// `StorageType` is the column representation (Int or String); `cacheKeyPath`
/// selects which typed dictionary inside `StoreCache` holds values of `Self`.
protocol StoreConvertible {
static var cacheKeyPath: ReferenceWritableKeyPath<StoreCache, [StoreKey: Self]> { get }
associatedtype StorageType
static func fromValue(_ value: StorageType) throws(StoreError) -> Self
static func toValue(_ value: Self) throws(StoreError) -> StorageType
}
extension StoreConvertible {
// Cache accessors take the unfair lock for the whole read/write; keep the
// critical sections this small — do not call out while holding the lock.
static func get(_ cache: StoreCache, key: StoreKey) -> Self? {
os_unfair_lock_lock(&cache.lock)
defer { os_unfair_lock_unlock(&cache.lock) }
return cache[keyPath: cacheKeyPath][key]
}
static func set(_ cache: StoreCache, key: StoreKey, value: Self?) {
os_unfair_lock_lock(&cache.lock)
defer { os_unfair_lock_unlock(&cache.lock) }
cache[keyPath: cacheKeyPath][key] = value
}
}
/// In-memory, lock-protected cache of store values, one dictionary per value type.
/// Accessed only through the `StoreConvertible` get/set helpers above.
final class StoreCache {
fileprivate var lock = os_unfair_lock()
fileprivate var intCache: [StoreKey: Int] = [:]
fileprivate var boolCache: [StoreKey: Bool] = [:]
fileprivate var dateCache: [StoreKey: Date] = [:]
fileprivate var stringCache: [StoreKey: String] = [:]
fileprivate var urlCache: [StoreKey: URL] = [:]
fileprivate var endpointArrayCache: [StoreKey: [Endpoint]] = [:]
fileprivate var stringDictCache: [StoreKey: [String: String]] = [:]
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) -> T? {
T.get(self, key: key.rawValue)
}
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T?) {
T.set(self, key: key.rawValue, value: value)
}
}
extension Int: StoreConvertible {
static let cacheKeyPath = \StoreCache.intCache
static func fromValue(_ value: Int) -> Int { value }
static func toValue(_ value: Int) -> Int { value }
}
// Bool is stored in the Int column as 0/1.
extension Bool: StoreConvertible {
static let cacheKeyPath = \StoreCache.boolCache
static func fromValue(_ value: Int) -> Bool { value == 1 }
static func toValue(_ value: Bool) -> Int { value ? 1 : 0 }
}
// Date is stored in the Int column as milliseconds since the Unix epoch.
extension Date: StoreConvertible {
static let cacheKeyPath = \StoreCache.dateCache
static func fromValue(_ value: Int) -> Date { Date(timeIntervalSince1970: TimeInterval(value) / 1000) }
static func toValue(_ value: Date) -> Int { Int(value.timeIntervalSince1970 * 1000) }
}
extension String: StoreConvertible {
static let cacheKeyPath = \StoreCache.stringCache
static func fromValue(_ value: String) -> String { value }
static func toValue(_ value: String) -> String { value }
}
// URL is stored in the String column as its absolute string.
extension URL: StoreConvertible {
static let cacheKeyPath = \StoreCache.urlCache
static func fromValue(_ value: String) throws(StoreError) -> URL {
guard let url = URL(string: value) else {
throw StoreError.invalidURL(value)
}
return url
}
static func toValue(_ value: URL) -> String { value.absoluteString }
}
/// Default JSON round-trip for `Codable` store values persisted in the String column.
extension StoreConvertible where Self: Codable, StorageType == String {
  /// Fresh coder instances; kept as computed properties so conformers may override.
  static var jsonDecoder: JSONDecoder { JSONDecoder() }
  static var jsonEncoder: JSONEncoder { JSONEncoder() }

  /// Decodes `Self` from its stored JSON string.
  ///
  /// - Throws: `StoreError.invalidJSON` (carrying the offending string) on any decode failure.
  static func fromValue(_ value: String) throws(StoreError) -> Self {
    guard let decoded = try? jsonDecoder.decode(Self.self, from: Data(value.utf8)) else {
      throw StoreError.invalidJSON(value)
    }
    return decoded
  }

  /// Encodes `value` to the JSON string that will be persisted.
  ///
  /// - Throws: `StoreError.encodingFailed` if encoding or UTF-8 conversion fails.
  static func toValue(_ value: Self) throws(StoreError) -> String {
    guard
      let data = try? jsonEncoder.encode(value),
      let json = String(data: data, encoding: .utf8)
    else {
      throw StoreError.encodingFailed
    }
    return json
  }
}
// [Endpoint] and [String: String] are Codable, so they inherit the JSON
// string round-trip from the Codable extension above; only the cache slot
// and storage column need declaring here.
extension Array: StoreConvertible where Element == Endpoint {
static let cacheKeyPath = \StoreCache.endpointArrayCache
typealias StorageType = String
}
extension Dictionary: StoreConvertible where Key == String, Value == String {
static let cacheKeyPath = \StoreCache.stringDictCache
typealias StorageType = String
}
extension Store {
  /// Process-wide in-memory cache that fronts the `store_entity` table.
  static let cache = StoreCache()

  /// Returns the value for `key`, consulting the in-memory cache before the database.
  /// Overload for keys whose storage representation is `Int` (Int/Bool/Date keys).
  ///
  /// Fix: the previous implementation cached the freshly fetched value but then
  /// fell through to `return nil`, so the first read of every persisted key
  /// reported "not set" (subsequent reads hit the now-populated cache).
  static func get<T: StoreConvertible>(_ conn: Database, _ key: StoreKey.Typed<T>) throws -> T?
  where T.StorageType == Int {
    if let cached = cache.get(key) { return cached }
    let query = Store.select(\.intValue).where { $0.id.eq(key.rawValue) }
    // `?? nil` flattens the double optional (missing row vs. NULL column).
    if let value = try query.fetchOne(conn) ?? nil {
      let converted = try T.fromValue(value)
      cache.set(key, value: converted)
      return converted
    }
    return nil
  }

  /// Returns the value for `key`; overload for string-backed keys (String/URL/JSON keys).
  /// Same cache-first flow and same fix as the Int overload above.
  static func get<T: StoreConvertible>(_ conn: Database, _ key: StoreKey.Typed<T>) throws -> T?
  where T.StorageType == String {
    if let cached = cache.get(key) { return cached }
    let query = Store.select(\.stringValue).where { $0.id.eq(key.rawValue) }
    if let value = try query.fetchOne(conn) ?? nil {
      let converted = try T.fromValue(value)
      cache.set(key, value: converted)
      return converted
    }
    return nil
  }

  /// Upserts `value` under `key` into the Int column, then refreshes the cache.
  /// Cache is updated only after the database write succeeds.
  static func set<T: StoreConvertible>(_ conn: Database, _ key: StoreKey.Typed<T>, value: T) throws
  where T.StorageType == Int {
    let converted = try T.toValue(value)
    try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: converted) }.execute(conn)
    cache.set(key, value: value)
  }

  /// Upserts `value` under `key` into the String column, then refreshes the cache.
  static func set<T: StoreConvertible>(_ conn: Database, _ key: StoreKey.Typed<T>, value: T) throws
  where T.StorageType == String {
    let converted = try T.toValue(value)
    try Store.upsert { Store(id: key.rawValue, stringValue: converted, intValue: nil) }.execute(conn)
    cache.set(key, value: value)
  }
}

View File

@@ -1,457 +0,0 @@
import SQLiteData
extension QueryExpression where QueryValue: _OptionalProtocol {
// asserts column result cannot be nil
// NOTE(review): this re-types the SQL fragment without a runtime check; use only
// where the query guarantees a non-NULL result (e.g. joined on a required row).
var unwrapped: SQLQueryExpression<QueryValue.Wrapped> {
SQLQueryExpression(self.queryFragment, as: QueryValue.Wrapped.self)
}
}
/// Bridges a `Date` into its unix-time column representation for query building.
extension Date {
var unixTime: Date.UnixTimeRepresentation {
return Date.UnixTimeRepresentation(queryOutput: self)
}
}
/// Face detected on a remote asset, with its bounding box in image pixels.
@Table("asset_face_entity")
struct AssetFace: Identifiable {
let id: String
@Column("asset_id")
let assetId: RemoteAsset.ID
@Column("person_id")
let personId: Person.ID?
@Column("image_width")
let imageWidth: Int
@Column("image_height")
let imageHeight: Int
@Column("bounding_box_x1")
let boundingBoxX1: Int
@Column("bounding_box_y1")
let boundingBoxY1: Int
@Column("bounding_box_x2")
let boundingBoxX2: Int
@Column("bounding_box_y2")
let boundingBoxY2: Int
@Column("source_type")
let sourceType: SourceType
}
/// The authenticated (logged-in) user, including quota and PIN state.
@Table("auth_user_entity")
struct AuthUser: Identifiable {
let id: String
let name: String
let email: String
@Column("is_admin")
let isAdmin: Bool
@Column("has_profile_image")
let hasProfileImage: Bool
@Column("profile_changed_at")
let profileChangedAt: Date
@Column("avatar_color")
let avatarColor: AvatarColor
@Column("quota_size_in_bytes")
let quotaSizeInBytes: Int
@Column("quota_usage_in_bytes")
let quotaUsageInBytes: Int
@Column("pin_code")
let pinCode: String?
}
/// An on-device (Photos) album and its backup configuration.
@Table("local_album_entity")
struct LocalAlbum: Identifiable {
let id: String
@Column("backup_selection")
let backupSelection: BackupSelection
@Column("linked_remote_album_id")
let linkedRemoteAlbumId: RemoteAlbum.ID?
@Column("marker")
let marker_: Bool?
let name: String
@Column("is_ios_shared_album")
let isIosSharedAlbum: Bool
@Column("updated_at")
let updatedAt: Date
}
extension LocalAlbum {
// Reusable predicates for albums opted in to / excluded from backup.
static let selected = Self.where { $0.backupSelection.eq(BackupSelection.selected) }
static let excluded = Self.where { $0.backupSelection.eq(BackupSelection.excluded) }
}
/// Join row linking a local asset to a local album.
@Table("local_album_asset_entity")
struct LocalAlbumAsset {
let id: ID
@Column("marker")
let marker_: String?
@Selection
struct ID {
@Column("asset_id")
let assetId: String
@Column("album_id")
let albumId: String
}
}
extension LocalAlbumAsset {
// Correlated predicates: each matches join rows for the LocalAsset currently
// in scope (`LocalAsset.columns.id`) whose album is selected/excluded for backup.
static let selected = Self.where {
$0.id.assetId.eq(LocalAsset.columns.id) && $0.id.albumId.in(LocalAlbum.selected.select(\.id))
}
static let excluded = Self.where {
$0.id.assetId.eq(LocalAsset.columns.id) && $0.id.albumId.in(LocalAlbum.excluded.select(\.id))
}
/// Get all asset ids that are only in this album and not in other albums.
/// This is useful in cases where the album is a smart album or a user-created album, especially on iOS
static func uniqueAssetIds(albumId: String) -> Select<String, Self, ()> {
return Self.select(\.id.assetId)
.where { laa in
laa.id.albumId.eq(albumId)
&& !LocalAlbumAsset.where { $0.id.assetId.eq(laa.id.assetId) && $0.id.albumId.neq(albumId) }.exists()
}
}
}
/// An asset from the device's photo library.
/// Timestamps are stored as strings here — presumably ISO-8601; TODO confirm writer format.
@Table("local_asset_entity")
struct LocalAsset: Identifiable {
let id: String
let checksum: String?
@Column("created_at")
let createdAt: String
@Column("duration_in_seconds")
let durationInSeconds: Int64?
let height: Int?
@Column("is_favorite")
let isFavorite: Bool
let name: String
let orientation: String
let type: AssetType
@Column("updated_at")
let updatedAt: String
let width: Int?
/// Assets eligible for upload: in at least one backup-selected album, not in any
/// excluded album, not already on the server for the current user (by checksum),
/// and not already enqueued as an upload task.
static func getCandidates() -> Where<LocalAsset> {
return Self.where { local in
LocalAlbumAsset.selected.exists()
&& !LocalAlbumAsset.excluded.exists()
&& !RemoteAsset.where {
local.checksum.eq($0.checksum)
&& $0.ownerId.eq(Store.select(\.stringValue).where { $0.id.eq(StoreKey.currentUser.rawValue) }.unwrapped)
}.exists()
&& !UploadTask.where { $0.localId.eq(local.id) }.exists()
}
}
}
/// Minimal projection used when enqueuing upload candidates.
@Selection
struct LocalAssetCandidate {
let id: LocalAsset.ID
let type: AssetType
}
/// Projection of the fields needed to start downloading an asset's data.
@Selection
struct LocalAssetDownloadData {
let checksum: String?
let createdAt: String
let livePhotoVideoId: RemoteAsset.ID?
let localId: LocalAsset.ID
let taskId: UploadTask.ID
let updatedAt: String
}
/// Projection of the fields needed to start uploading a staged file.
@Selection
struct LocalAssetUploadData {
let filePath: URL
let priority: Float
let taskId: UploadTask.ID
let type: AssetType
}
/// Join row linking an asset to a memory.
/// NOTE(review): the second ID column is named `album_id` though it joins memories — confirm schema.
@Table("memory_asset_entity")
struct MemoryAsset {
let id: ID
@Selection
struct ID {
@Column("asset_id")
let assetId: String
@Column("album_id")
let albumId: String
}
}
/// A server-generated "memory" (e.g. on-this-day collection); `data` holds its JSON payload.
@Table("memory_entity")
struct Memory: Identifiable {
let id: String
@Column("created_at")
let createdAt: Date
@Column("updated_at")
let updatedAt: Date
@Column("deleted_at")
let deletedAt: Date?
@Column("owner_id")
let ownerId: User.ID
let type: MemoryType
let data: String
@Column("is_saved")
let isSaved: Bool
@Column("memory_at")
let memoryAt: Date
@Column("seen_at")
let seenAt: Date?
@Column("show_at")
let showAt: Date?
@Column("hide_at")
let hideAt: Date?
}
/// A partner-sharing relationship; composite key of the two user ids.
@Table("partner_entity")
struct Partner {
let id: ID
@Column("in_timeline")
let inTimeline: Bool
@Selection
struct ID {
@Column("shared_by_id")
let sharedById: String
@Column("shared_with_id")
let sharedWithId: String
}
}
/// A recognized person that detected faces can be assigned to.
@Table("person_entity")
struct Person: Identifiable {
let id: String
@Column("created_at")
let createdAt: Date
@Column("updated_at")
let updatedAt: Date
@Column("owner_id")
let ownerId: String
let name: String
@Column("face_asset_id")
let faceAssetId: AssetFace.ID?
@Column("is_favorite")
let isFavorite: Bool
@Column("is_hidden")
let isHidden: Bool
let color: String?
@Column("birth_date")
let birthDate: Date?
}
/// A server-side album.
@Table("remote_album_entity")
struct RemoteAlbum: Identifiable {
let id: String
@Column("created_at")
let createdAt: Date
let description: String?
@Column("is_activity_enabled")
let isActivityEnabled: Bool
let name: String
let order: Int
@Column("owner_id")
let ownerId: String
@Column("thumbnail_asset_id")
let thumbnailAssetId: RemoteAsset.ID?
@Column("updated_at")
let updatedAt: Date
}
/// Join row linking a remote asset to a remote album.
@Table("remote_album_asset_entity")
struct RemoteAlbumAsset {
let id: ID
@Selection
struct ID {
@Column("asset_id")
let assetId: String
@Column("album_id")
let albumId: String
}
}
/// Membership of a user in a shared remote album, with their role.
@Table("remote_album_user_entity")
struct RemoteAlbumUser {
let id: ID
let role: AlbumUserRole
@Selection
struct ID {
@Column("album_id")
let albumId: String
@Column("user_id")
let userId: String
}
}
/// An asset known to the server; `checksum` is the dedupe key against local assets.
@Table("remote_asset_entity")
struct RemoteAsset: Identifiable {
let id: String
let checksum: String
@Column("is_favorite")
let isFavorite: Bool
@Column("deleted_at")
let deletedAt: Date?
@Column("owner_id")
let ownerId: User.ID
@Column("local_date_time")
let localDateTime: Date?
@Column("thumb_hash")
let thumbHash: String?
@Column("library_id")
let libraryId: String?
@Column("live_photo_video_id")
let livePhotoVideoId: String?
@Column("stack_id")
let stackId: Stack.ID?
let visibility: AssetVisibility
}
/// EXIF metadata for a remote asset; one row per asset (asset_id is the primary key).
@Table("remote_exif_entity")
struct RemoteExif {
@Column("asset_id", primaryKey: true)
let assetId: RemoteAsset.ID
let city: String?
let state: String?
let country: String?
@Column("date_time_original")
let dateTimeOriginal: Date?
let description: String?
let height: Int?
let width: Int?
@Column("exposure_time")
let exposureTime: String?
@Column("f_number")
let fNumber: Double?
@Column("file_size")
let fileSize: Int?
@Column("focal_length")
let focalLength: Double?
let latitude: Double?
let longitude: Double?
let iso: Int?
let make: String?
let model: String?
let lens: String?
let orientation: String?
@Column("time_zone")
let timeZone: String?
let rating: Int?
@Column("projection_type")
let projectionType: String?
}
/// A stack of related assets with one designated primary.
@Table("stack_entity")
struct Stack: Identifiable {
let id: String
@Column("created_at")
let createdAt: Date
@Column("updated_at")
let updatedAt: Date
@Column("owner_id")
let ownerId: User.ID
@Column("primary_asset_id")
let primaryAssetId: String
}
/// Backing row of the typed key-value store; exactly one of the two value columns is used,
/// as determined by the key's `StorageType` (see the StoreConvertible conformances).
@Table("store_entity")
struct Store: Identifiable {
let id: StoreKey
@Column("string_value")
let stringValue: String?
@Column("int_value")
let intValue: Int?
}
/// A queued download/upload unit of work for one local asset.
@Table("upload_tasks")
struct UploadTask: Identifiable {
let id: Int64
let attempts: Int
@Column("created_at", as: Date.UnixTimeRepresentation.self)
let createdAt: Date
@Column("file_path")
var filePath: URL?
@Column("is_live_photo")
let isLivePhoto: Bool?
@Column("last_error")
let lastError: UploadErrorCode?
@Column("live_photo_video_id")
let livePhotoVideoId: RemoteAsset.ID?
@Column("local_id")
var localId: LocalAsset.ID?
let method: UploadMethod
var priority: Float
@Column("retry_after", as: Date?.UnixTimeRepresentation.self)
let retryAfter: Date?
let status: TaskStatus
/// Builds an UPDATE that records a failure: while `attempts` is within
/// `TaskConfig.maxRetries` the task is re-queued as `downloadPending`,
/// otherwise it is moved to the terminal `status` passed by the caller.
/// `retry_after` applies exponential backoff computed in SQL: now + (4 << attempts) seconds.
static func retryOrFail(code: UploadErrorCode, status: TaskStatus) -> Update<UploadTask, ()> {
return Self.update { row in
row.status = Case().when(row.attempts.lte(TaskConfig.maxRetries), then: TaskStatus.downloadPending).else(status)
row.attempts += 1
row.lastError = code
row.retryAfter = #sql("unixepoch('now') + (\(4 << row.attempts))")
}
}
}
/// Aggregated per-status task counts (backed by the `upload_task_stats` view/table),
/// used to decide how many new tasks may be enqueued.
@Table("upload_task_stats")
struct UploadTaskStat {
@Column("pending_downloads")
let pendingDownloads: Int
@Column("pending_uploads")
let pendingUploads: Int
@Column("queued_downloads")
let queuedDownloads: Int
@Column("queued_uploads")
let queuedUploads: Int
@Column("failed_downloads")
let failedDownloads: Int
@Column("failed_uploads")
let failedUploads: Int
@Column("completed_uploads")
let completedUploads: Int
@Column("skipped_uploads")
let skippedUploads: Int
// Remaining capacity before hitting the TaskConfig queue-depth caps.
static let availableDownloadSlots = Self.select {
TaskConfig.maxPendingDownloads - ($0.pendingDownloads + $0.queuedDownloads)
}
static let availableUploadSlots = Self.select {
TaskConfig.maxPendingUploads - ($0.pendingUploads + $0.queuedUploads)
}
static let availableSlots = Self.select {
TaskConfig.maxPendingUploads + TaskConfig.maxPendingDownloads
- ($0.pendingDownloads + $0.queuedDownloads + $0.pendingUploads + $0.queuedUploads)
}
}
/// Any user visible to the client (owner, partners, album members).
@Table("user_entity")
struct User: Identifiable {
let id: String
let name: String
let email: String
@Column("has_profile_image")
let hasProfileImage: Bool
@Column("profile_changed_at")
let profileChangedAt: Date
@Column("avatar_color")
let avatarColor: AvatarColor
}
/// Arbitrary per-user metadata blob keyed by user id + key.
@Table("user_metadata_entity")
struct UserMetadata {
let id: ID
let value: Data
@Selection
struct ID {
@Column("user_id")
let userId: String
// NOTE(review): `key` is typed Date, which is unusual for a metadata key —
// confirm the column's actual type/encoding against the migration.
let key: Date
}
}

View File

@@ -0,0 +1,536 @@
// Autogenerated from Pigeon (v26.0.2), do not edit directly.
// See also: https://pub.dev/packages/pigeon
import Foundation
#if os(iOS)
import Flutter
#elseif os(macOS)
import FlutterMacOS
#else
#error("Unsupported platform.")
#endif
/// Error class for passing custom error details to Dart side.
/// Autogenerated by Pigeon — regenerate rather than hand-editing.
final class PigeonError: Error {
let code: String
let message: String?
let details: Sendable?
init(code: String, message: String?, details: Sendable?) {
self.code = code
self.message = message
self.details = details
}
var localizedDescription: String {
return
// NOTE(review): generated string is missing its closing parenthesis; if it
// matters, fix it in the Pigeon template upstream, not in this file.
"PigeonError(code: \(code), message: \(message ?? "<nil>"), details: \(details ?? "<nil>")"
}
}
// Wraps a successful result as the single-element list Pigeon's wire format expects.
private func wrapResult(_ result: Any?) -> [Any?] {
return [result]
}
// Encodes an error as Pigeon's [code, message, details] triple; unknown error
// types are stringified with a captured stack trace.
private func wrapError(_ error: Any) -> [Any?] {
if let pigeonError = error as? PigeonError {
return [
pigeonError.code,
pigeonError.message,
pigeonError.details,
]
}
if let flutterError = error as? FlutterError {
return [
flutterError.code,
flutterError.message,
flutterError.details,
]
}
return [
"\(error)",
"\(type(of: error))",
"Stacktrace: \(Thread.callStackSymbols)",
]
}
// True for both Swift nil and the NSNull sentinel used on the channel.
private func isNullish(_ value: Any?) -> Bool {
return value is NSNull || value == nil
}
// Maps NSNull to nil and force-casts everything else to T? (traps on type mismatch).
private func nilOrValue<T>(_ value: Any?) -> T? {
if value is NSNull { return nil }
return value as! T?
}
// Structural equality over Pigeon message values: scalars via AnyHashable,
// lists element-wise, dictionaries key-wise; anything else compares unequal.
func deepEqualsMessages(_ lhs: Any?, _ rhs: Any?) -> Bool {
let cleanLhs = nilOrValue(lhs) as Any?
let cleanRhs = nilOrValue(rhs) as Any?
switch (cleanLhs, cleanRhs) {
case (nil, nil):
return true
case (nil, _), (_, nil):
return false
case is (Void, Void):
return true
case let (cleanLhsHashable, cleanRhsHashable) as (AnyHashable, AnyHashable):
return cleanLhsHashable == cleanRhsHashable
case let (cleanLhsArray, cleanRhsArray) as ([Any?], [Any?]):
guard cleanLhsArray.count == cleanRhsArray.count else { return false }
for (index, element) in cleanLhsArray.enumerated() {
if !deepEqualsMessages(element, cleanRhsArray[index]) {
return false
}
}
return true
case let (cleanLhsDictionary, cleanRhsDictionary) as ([AnyHashable: Any?], [AnyHashable: Any?]):
guard cleanLhsDictionary.count == cleanRhsDictionary.count else { return false }
for (key, cleanLhsValue) in cleanLhsDictionary {
guard cleanRhsDictionary.index(forKey: key) != nil else { return false }
if !deepEqualsMessages(cleanLhsValue, cleanRhsDictionary[key]!) {
return false
}
}
return true
default:
// Any other type shouldn't be able to be used with pigeon. File an issue if you find this to be untrue.
return false
}
}
// Structural hashing companion to deepEqualsMessages. Note the generated code
// folds both the hashValue and the description for hashable leaves.
func deepHashMessages(value: Any?, hasher: inout Hasher) {
if let valueList = value as? [AnyHashable] {
for item in valueList { deepHashMessages(value: item, hasher: &hasher) }
return
}
if let valueDict = value as? [AnyHashable: AnyHashable] {
for key in valueDict.keys {
hasher.combine(key)
deepHashMessages(value: valueDict[key]!, hasher: &hasher)
}
return
}
if let hashableValue = value as? AnyHashable {
hasher.combine(hashableValue.hashValue)
}
return hasher.combine(String(describing: value))
}
/// Generated class from Pigeon that represents data sent in messages.
/// Wire format is positional: `toList()` order must match `fromList()` indices.
struct PlatformAsset: Hashable {
var id: String
var name: String
var type: Int64
var createdAt: Int64? = nil
var updatedAt: Int64? = nil
var width: Int64? = nil
var height: Int64? = nil
var durationInSeconds: Int64
var orientation: Int64
var isFavorite: Bool
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformAsset? {
let id = pigeonVar_list[0] as! String
let name = pigeonVar_list[1] as! String
let type = pigeonVar_list[2] as! Int64
let createdAt: Int64? = nilOrValue(pigeonVar_list[3])
let updatedAt: Int64? = nilOrValue(pigeonVar_list[4])
let width: Int64? = nilOrValue(pigeonVar_list[5])
let height: Int64? = nilOrValue(pigeonVar_list[6])
let durationInSeconds = pigeonVar_list[7] as! Int64
let orientation = pigeonVar_list[8] as! Int64
let isFavorite = pigeonVar_list[9] as! Bool
return PlatformAsset(
id: id,
name: name,
type: type,
createdAt: createdAt,
updatedAt: updatedAt,
width: width,
height: height,
durationInSeconds: durationInSeconds,
orientation: orientation,
isFavorite: isFavorite
)
}
func toList() -> [Any?] {
return [
id,
name,
type,
createdAt,
updatedAt,
width,
height,
durationInSeconds,
orientation,
isFavorite,
]
}
static func == (lhs: PlatformAsset, rhs: PlatformAsset) -> Bool {
return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
}
/// Generated class from Pigeon that represents data sent in messages.
struct PlatformAlbum: Hashable {
var id: String
var name: String
var updatedAt: Int64? = nil
var isCloud: Bool
var assetCount: Int64
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformAlbum? {
let id = pigeonVar_list[0] as! String
let name = pigeonVar_list[1] as! String
let updatedAt: Int64? = nilOrValue(pigeonVar_list[2])
let isCloud = pigeonVar_list[3] as! Bool
let assetCount = pigeonVar_list[4] as! Int64
return PlatformAlbum(
id: id,
name: name,
updatedAt: updatedAt,
isCloud: isCloud,
assetCount: assetCount
)
}
func toList() -> [Any?] {
return [
id,
name,
updatedAt,
isCloud,
assetCount,
]
}
static func == (lhs: PlatformAlbum, rhs: PlatformAlbum) -> Bool {
return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
}
/// Generated class from Pigeon that represents data sent in messages.
/// Result payload of `getMediaChanges`: changed/deleted assets plus asset→album ids.
struct SyncDelta: Hashable {
var hasChanges: Bool
var updates: [PlatformAsset]
var deletes: [String]
var assetAlbums: [String: [String]]
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> SyncDelta? {
let hasChanges = pigeonVar_list[0] as! Bool
let updates = pigeonVar_list[1] as! [PlatformAsset]
let deletes = pigeonVar_list[2] as! [String]
let assetAlbums = pigeonVar_list[3] as! [String: [String]]
return SyncDelta(
hasChanges: hasChanges,
updates: updates,
deletes: deletes,
assetAlbums: assetAlbums
)
}
func toList() -> [Any?] {
return [
hasChanges,
updates,
deletes,
assetAlbums,
]
}
static func == (lhs: SyncDelta, rhs: SyncDelta) -> Bool {
return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
}
/// Generated class from Pigeon that represents data sent in messages.
/// Per-asset hashing outcome: exactly one of `error`/`hash` is expected to be set.
struct HashResult: Hashable {
var assetId: String
var error: String? = nil
var hash: String? = nil
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> HashResult? {
let assetId = pigeonVar_list[0] as! String
let error: String? = nilOrValue(pigeonVar_list[1])
let hash: String? = nilOrValue(pigeonVar_list[2])
return HashResult(
assetId: assetId,
error: error,
hash: hash
)
}
func toList() -> [Any?] {
return [
assetId,
error,
hash,
]
}
static func == (lhs: HashResult, rhs: HashResult) -> Bool {
return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
}
// Custom codec: type tags 129-132 map to PlatformAsset, PlatformAlbum,
// SyncDelta, HashResult. Reader and writer tag assignments must stay in sync.
private class MessagesPigeonCodecReader: FlutterStandardReader {
override func readValue(ofType type: UInt8) -> Any? {
switch type {
case 129:
return PlatformAsset.fromList(self.readValue() as! [Any?])
case 130:
return PlatformAlbum.fromList(self.readValue() as! [Any?])
case 131:
return SyncDelta.fromList(self.readValue() as! [Any?])
case 132:
return HashResult.fromList(self.readValue() as! [Any?])
default:
return super.readValue(ofType: type)
}
}
}
private class MessagesPigeonCodecWriter: FlutterStandardWriter {
override func writeValue(_ value: Any) {
if let value = value as? PlatformAsset {
super.writeByte(129)
super.writeValue(value.toList())
} else if let value = value as? PlatformAlbum {
super.writeByte(130)
super.writeValue(value.toList())
} else if let value = value as? SyncDelta {
super.writeByte(131)
super.writeValue(value.toList())
} else if let value = value as? HashResult {
super.writeByte(132)
super.writeValue(value.toList())
} else {
super.writeValue(value)
}
}
}
private class MessagesPigeonCodecReaderWriter: FlutterStandardReaderWriter {
override func reader(with data: Data) -> FlutterStandardReader {
return MessagesPigeonCodecReader(data: data)
}
override func writer(with data: NSMutableData) -> FlutterStandardWriter {
return MessagesPigeonCodecWriter(data: data)
}
}
class MessagesPigeonCodec: FlutterStandardMessageCodec, @unchecked Sendable {
static let shared = MessagesPigeonCodec(readerWriter: MessagesPigeonCodecReaderWriter())
}
/// Generated protocol from Pigeon that represents a handler of messages from Flutter.
/// Implemented on the iOS side; channel wiring is done by NativeSyncApiSetup.setUp.
protocol NativeSyncApi {
func shouldFullSync() throws -> Bool
func getMediaChanges() throws -> SyncDelta
func checkpointSync() throws
func clearSyncCheckpoint() throws
func getAssetIdsForAlbum(albumId: String) throws -> [String]
func getAlbums() throws -> [PlatformAlbum]
func getAssetsCountSince(albumId: String, timestamp: Int64) throws -> Int64
func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset]
func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void)
func cancelHashing() throws
}
/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
class NativeSyncApiSetup {
static var codec: FlutterStandardMessageCodec { MessagesPigeonCodec.shared }
/// Sets up an instance of `NativeSyncApi` to handle messages through the `binaryMessenger`.
/// Passing a nil `api` tears down the handler for every channel instead.
static func setUp(binaryMessenger: FlutterBinaryMessenger, api: NativeSyncApi?, messageChannelSuffix: String = "") {
let channelSuffix = messageChannelSuffix.count > 0 ? ".\(messageChannelSuffix)" : ""
// On iOS, long-running queries are dispatched on a background task queue
// (when the messenger supports it) so they do not block the platform thread.
#if os(iOS)
let taskQueue = binaryMessenger.makeBackgroundTaskQueue?()
#else
let taskQueue: FlutterTaskQueue? = nil
#endif
let shouldFullSyncChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.shouldFullSync\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
shouldFullSyncChannel.setMessageHandler { _, reply in
do {
let result = try api.shouldFullSync()
reply(wrapResult(result))
} catch {
reply(wrapError(error))
}
}
} else {
shouldFullSyncChannel.setMessageHandler(nil)
}
// Channels that may do heavy PhotoKit work are created with the task queue when available.
let getMediaChangesChannel = taskQueue == nil
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getMediaChanges\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getMediaChanges\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
if let api = api {
getMediaChangesChannel.setMessageHandler { _, reply in
do {
let result = try api.getMediaChanges()
reply(wrapResult(result))
} catch {
reply(wrapError(error))
}
}
} else {
getMediaChangesChannel.setMessageHandler(nil)
}
let checkpointSyncChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.checkpointSync\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
checkpointSyncChannel.setMessageHandler { _, reply in
do {
try api.checkpointSync()
reply(wrapResult(nil))
} catch {
reply(wrapError(error))
}
}
} else {
checkpointSyncChannel.setMessageHandler(nil)
}
let clearSyncCheckpointChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.clearSyncCheckpoint\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
clearSyncCheckpointChannel.setMessageHandler { _, reply in
do {
try api.clearSyncCheckpoint()
reply(wrapResult(nil))
} catch {
reply(wrapError(error))
}
}
} else {
clearSyncCheckpointChannel.setMessageHandler(nil)
}
let getAssetIdsForAlbumChannel = taskQueue == nil
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetIdsForAlbum\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetIdsForAlbum\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
if let api = api {
getAssetIdsForAlbumChannel.setMessageHandler { message, reply in
// Arguments arrive as a positional list; order matches the Dart-side declaration.
let args = message as! [Any?]
let albumIdArg = args[0] as! String
do {
let result = try api.getAssetIdsForAlbum(albumId: albumIdArg)
reply(wrapResult(result))
} catch {
reply(wrapError(error))
}
}
} else {
getAssetIdsForAlbumChannel.setMessageHandler(nil)
}
let getAlbumsChannel = taskQueue == nil
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAlbums\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAlbums\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
if let api = api {
getAlbumsChannel.setMessageHandler { _, reply in
do {
let result = try api.getAlbums()
reply(wrapResult(result))
} catch {
reply(wrapError(error))
}
}
} else {
getAlbumsChannel.setMessageHandler(nil)
}
let getAssetsCountSinceChannel = taskQueue == nil
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetsCountSince\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetsCountSince\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
if let api = api {
getAssetsCountSinceChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
let albumIdArg = args[0] as! String
let timestampArg = args[1] as! Int64
do {
let result = try api.getAssetsCountSince(albumId: albumIdArg, timestamp: timestampArg)
reply(wrapResult(result))
} catch {
reply(wrapError(error))
}
}
} else {
getAssetsCountSinceChannel.setMessageHandler(nil)
}
let getAssetsForAlbumChannel = taskQueue == nil
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetsForAlbum\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetsForAlbum\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
if let api = api {
getAssetsForAlbumChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
let albumIdArg = args[0] as! String
// Optional argument: nil means "no updated-time filter".
let updatedTimeCondArg: Int64? = nilOrValue(args[1])
do {
let result = try api.getAssetsForAlbum(albumId: albumIdArg, updatedTimeCond: updatedTimeCondArg)
reply(wrapResult(result))
} catch {
reply(wrapError(error))
}
}
} else {
getAssetsForAlbumChannel.setMessageHandler(nil)
}
let hashAssetsChannel = taskQueue == nil
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.hashAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.hashAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
if let api = api {
hashAssetsChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
let assetIdsArg = args[0] as! [String]
let allowNetworkAccessArg = args[1] as! Bool
// hashAssets is the only async API here: the reply is sent from its completion handler.
api.hashAssets(assetIds: assetIdsArg, allowNetworkAccess: allowNetworkAccessArg) { result in
switch result {
case .success(let res):
reply(wrapResult(res))
case .failure(let error):
reply(wrapError(error))
}
}
}
} else {
hashAssetsChannel.setMessageHandler(nil)
}
let cancelHashingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.cancelHashing\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
cancelHashingChannel.setMessageHandler { _, reply in
do {
try api.cancelHashing()
reply(wrapResult(nil))
} catch {
reply(wrapError(error))
}
}
} else {
cancelHashingChannel.setMessageHandler(nil)
}
}
}

View File

@@ -1,580 +1,389 @@
import Algorithms
import CryptoKit
import Foundation
import Photos
import SQLiteData
import os.log
import CryptoKit
extension Notification.Name {
static let localSyncDidComplete = Notification.Name("localSyncDidComplete")
}
enum LocalSyncError: Error {
case photoAccessDenied, assetUpsertFailed, noChangeToken, unsupportedOS
case unsupportedAssetType(Int)
}
enum SyncConfig {
static let albumTypes: [PHAssetCollectionType] = [.album, .smartAlbum]
static let batchSize: Int = 5000
static let changeTokenKey = "immich:changeToken"
static let recoveredAlbumSubType = 1_000_000_219
static let sortDescriptors = [NSSortDescriptor(key: "localIdentifier", ascending: true)]
}
class LocalSyncService {
private static let dateFormatter = ISO8601DateFormatter()
private let defaults: UserDefaults
private let db: DatabasePool
private let photoLibrary: PhotoLibraryProvider
private let logger = Logger(subsystem: "com.immich.mobile", category: "LocalSync")
init(db: DatabasePool, photoLibrary: PhotoLibraryProvider, with defaults: UserDefaults = .standard) {
self.defaults = defaults
self.db = db
self.photoLibrary = photoLibrary
/// Hashable wrapper around PlatformAsset whose identity is the asset id only,
/// so a Set<AssetWrapper> deduplicates change records for the same asset even
/// when other fields differ.
struct AssetWrapper: Hashable, Equatable {
let asset: PlatformAsset
init(with asset: PlatformAsset) {
self.asset = asset
}
// Only the id participates in hashing; must stay consistent with ==.
func hash(into hasher: inout Hasher) {
hasher.combine(self.asset.id)
}
static func == (lhs: AssetWrapper, rhs: AssetWrapper) -> Bool {
return lhs.asset.id == rhs.asset.id
}
}
class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
static let name = "NativeSyncApi"
static func register(with registrar: any FlutterPluginRegistrar) {
let instance = NativeSyncApiImpl()
NativeSyncApiSetup.setUp(binaryMessenger: registrar.messenger(), api: instance)
registrar.publish(instance)
}
func detachFromEngine(for registrar: any FlutterPluginRegistrar) {
super.detachFromEngine()
}
private let defaults: UserDefaults
private let changeTokenKey = "immich:changeToken"
private let albumTypes: [PHAssetCollectionType] = [.album, .smartAlbum]
private let recoveredAlbumSubType = 1000000219
private var hashTask: Task<Void?, Error>?
private static let hashCancelledCode = "HASH_CANCELLED"
private static let hashCancelled = Result<[HashResult], Error>.failure(PigeonError(code: hashCancelledCode, message: "Hashing cancelled", details: nil))
init(with defaults: UserDefaults = .standard) {
self.defaults = defaults
}
@available(iOS 16, *)
private func getChangeToken() -> PHPersistentChangeToken? {
defaults.data(forKey: SyncConfig.changeTokenKey)
.flatMap { try? NSKeyedUnarchiver.unarchivedObject(ofClass: PHPersistentChangeToken.self, from: $0) }
guard let data = defaults.data(forKey: changeTokenKey) else {
return nil
}
return try? NSKeyedUnarchiver.unarchivedObject(ofClass: PHPersistentChangeToken.self, from: data)
}
@available(iOS 16, *)
private func saveChangeToken(token: PHPersistentChangeToken) {
private func saveChangeToken(token: PHPersistentChangeToken) -> Void {
guard let data = try? NSKeyedArchiver.archivedData(withRootObject: token, requiringSecureCoding: true) else {
return
}
defaults.set(data, forKey: SyncConfig.changeTokenKey)
defaults.set(data, forKey: changeTokenKey)
}
func clearSyncCheckpoint() {
defaults.removeObject(forKey: SyncConfig.changeTokenKey)
func clearSyncCheckpoint() -> Void {
defaults.removeObject(forKey: changeTokenKey)
}
func checkpointSync() {
guard #available(iOS 16, *) else { return }
saveChangeToken(token: photoLibrary.currentChangeToken)
guard #available(iOS 16, *) else {
return
}
saveChangeToken(token: PHPhotoLibrary.shared().currentChangeToken)
}
func sync(full: Bool = false) async throws {
let start = Date()
defer { logger.info("Sync completed in \(Int(Date().timeIntervalSince(start) * 1000))ms") }
guard !full, !shouldFullSync(), let delta = try? getMediaChanges(), delta.hasChanges
else {
logger.debug("Full sync: \(full ? "user requested" : "required")")
return try await fullSync()
}
logger.debug("Delta sync: +\(delta.updates.count) -\(delta.deletes.count)")
let albumFetchOptions = PHFetchOptions()
albumFetchOptions.predicate = NSPredicate(format: "assetCollectionSubtype != %d", SyncConfig.recoveredAlbumSubType)
try await db.write { conn in
try #sql("pragma temp_store = 2").execute(conn)
try #sql("create temp table current_albums(id text primary key) without rowid").execute(conn)
var cloudAlbums = [PHAssetCollection]()
for type in SyncConfig.albumTypes {
photoLibrary.fetchAlbums(with: type, subtype: .any, options: albumFetchOptions)
.enumerateObjects { album, _, _ in
try? CurrentAlbum.insert { CurrentAlbum(id: album.localIdentifier) }.execute(conn)
try? upsertAlbum(album, conn: conn)
if album.isCloud {
cloudAlbums.append(album)
}
}
}
try LocalAlbum.delete().where { localAlbum in
localAlbum.backupSelection.eq(BackupSelection.none) && !CurrentAlbum.where { $0.id == localAlbum.id }.exists()
}.execute(conn)
for asset in delta.updates {
try upsertAsset(asset, conn: conn)
}
if !delta.deletes.isEmpty {
try LocalAsset.delete().where { $0.id.in(delta.deletes) }.execute(conn)
}
try self.updateAssetAlbumLinks(delta.assetAlbums, conn: conn)
}
// On iOS, we need to full sync albums that are marked as cloud as the delta sync
// does not include changes for cloud albums. If ignoreIcloudAssets is enabled,
// remove the albums from the local database from the previous sync
if !cloudAlbums.isEmpty {
try await syncCloudAlbums(cloudAlbums)
}
checkpointSync()
}
private func fullSync() async throws {
let start = Date()
defer { logger.info("Full sync completed in \(Int(Date().timeIntervalSince(start) * 1000))ms") }
let dbAlbumIds = try await db.read { conn in
try LocalAlbum.all.select(\.id).order { $0.id }.fetchAll(conn)
}
let albumFetchOptions = PHFetchOptions()
albumFetchOptions.predicate = NSPredicate(format: "assetCollectionSubtype != %d", SyncConfig.recoveredAlbumSubType)
albumFetchOptions.sortDescriptors = SyncConfig.sortDescriptors
let albums = photoLibrary.fetchAlbums(with: .album, subtype: .any, options: albumFetchOptions)
let smartAlbums = photoLibrary.fetchAlbums(with: .smartAlbum, subtype: .any, options: albumFetchOptions)
try await withThrowingTaskGroup(of: Void.self) { group in
var dbIndex = 0
var albumIndex = 0
var smartAlbumIndex = 0
// Three-pointer merge: dbAlbumIds, albums, smartAlbums
while albumIndex < albums.count || smartAlbumIndex < smartAlbums.count {
let currentAlbum = albumIndex < albums.count ? albums.object(at: albumIndex) : nil
let currentSmartAlbum = smartAlbumIndex < smartAlbums.count ? smartAlbums.object(at: smartAlbumIndex) : nil
let useRegular =
currentSmartAlbum == nil
|| (currentAlbum != nil && currentAlbum!.localIdentifier < currentSmartAlbum!.localIdentifier)
let nextAlbum = useRegular ? currentAlbum! : currentSmartAlbum!
let deviceId = nextAlbum.localIdentifier
while dbIndex < dbAlbumIds.count && dbAlbumIds[dbIndex] < deviceId {
let albumToRemove = dbAlbumIds[dbIndex]
group.addTask { try await self.removeAlbum(albumId: albumToRemove) }
dbIndex += 1
}
if dbIndex < dbAlbumIds.count && dbAlbumIds[dbIndex] == deviceId {
group.addTask { try await self.syncAlbum(albumId: deviceId, deviceAlbum: nextAlbum) }
dbIndex += 1
} else {
group.addTask { try await self.addAlbum(nextAlbum) }
}
if useRegular {
albumIndex += 1
} else {
smartAlbumIndex += 1
}
}
// Remove any remaining DB albums
while dbIndex < dbAlbumIds.count {
let albumToRemove = dbAlbumIds[dbIndex]
group.addTask { try await self.removeAlbum(albumId: albumToRemove) }
dbIndex += 1
}
try await group.waitForAll()
}
checkpointSync()
}
private func shouldFullSync() -> Bool {
guard #available(iOS 16, *), photoLibrary.isAuthorized, let token = getChangeToken(),
(try? photoLibrary.fetchPersistentChanges(since: token)) != nil
else {
func shouldFullSync() -> Bool {
guard #available(iOS 16, *),
PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized,
let storedToken = getChangeToken() else {
// When we do not have access to photo library, older iOS version or No token available, fallback to full sync
return true
}
guard let _ = try? PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken) else {
// Cannot fetch persistent changes
return true
}
return false
}
private func addAlbum(_ album: PHAssetCollection) async throws {
let options = PHFetchOptions()
options.includeHiddenAssets = false
if let timestamp = album.updatedAt {
let date = timestamp as NSDate
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
}
let result = photoLibrary.fetchAssets(in: album, options: options)
try await self.db.write { conn in
try upsertStreamedAssets(result: result, albumId: album.localIdentifier, conn: conn)
}
}
private func upsertStreamedAssets(result: PHFetchResult<PHAsset>, albumId: String, conn: Database) throws {
result.enumerateObjects { asset, _, stop in
do {
try self.upsertAsset(asset, conn: conn)
try self.linkAsset(asset.localIdentifier, toAlbum: albumId, conn: conn)
} catch {
stop.pointee = true
}
}
if let error = conn.lastErrorMessage {
throw LocalSyncError.assetUpsertFailed
}
}
/// Remove all assets that are only in this particular album.
/// We cannot remove all assets in the album because they might be in other albums in iOS.
private func removeAlbum(albumId: String) async throws {
try await db.write { conn in
try LocalAsset.delete().where { $0.id.in(LocalAlbumAsset.uniqueAssetIds(albumId: albumId)) }.execute(conn)
try LocalAlbum.delete()
.where { $0.id.eq(albumId) && $0.backupSelection.eq(BackupSelection.none) }
.execute(conn)
}
}
private func syncAlbum(albumId: String, deviceAlbum: PHAssetCollection) async throws {
let dbAlbum = try await db.read { conn in
try LocalAlbum.all.where { $0.id.eq(albumId) }.fetchOne(conn)
}
guard let dbAlbum else { return try await addAlbum(deviceAlbum) }
// Check if unchanged
guard dbAlbum.name != deviceAlbum.localizedTitle || dbAlbum.updatedAt != deviceAlbum.updatedAt
else { return }
try await fullDiffAlbum(dbAlbum: dbAlbum, deviceAlbum: deviceAlbum)
}
private func fullDiffAlbum(dbAlbum: LocalAlbum, deviceAlbum: PHAssetCollection) async throws {
let options = PHFetchOptions()
options.includeHiddenAssets = false
let date = dbAlbum.updatedAt as NSDate
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
options.sortDescriptors = SyncConfig.sortDescriptors
var deviceAssetIds: [String] = []
let result = photoLibrary.fetchAssets(in: deviceAlbum, options: options)
result.enumerateObjects { asset, _, _ in
deviceAssetIds.append(asset.localIdentifier)
}
let dbAssetIds = try await db.read { conn in
try LocalAlbumAsset.all
.where { $0.id.albumId.eq(dbAlbum.id) }
.select(\.id.assetId)
.order { $0.id.assetId }
.fetchAll(conn)
}
let (toFetch, toDelete) = diffSortedArrays(dbAssetIds, deviceAssetIds)
guard !toFetch.isEmpty || !toDelete.isEmpty else { return }
logger.debug("Syncing \(deviceAlbum.localizedTitle ?? "album"): +\(toFetch.count) -\(toDelete.count)")
try await db.write { conn in
try self.updateAlbum(deviceAlbum, conn: conn)
}
for batch in toFetch.chunks(ofCount: SyncConfig.batchSize) {
let options = PHFetchOptions()
options.includeHiddenAssets = false
let result = photoLibrary.fetchAssets(withLocalIdentifiers: Array(batch), options: options)
try await db.write { conn in
try upsertStreamedAssets(result: result, albumId: deviceAlbum.localIdentifier, conn: conn)
}
}
guard !toDelete.isEmpty else { return }
let uniqueAssetIds = try await db.read { conn in
return try LocalAlbumAsset.uniqueAssetIds(albumId: deviceAlbum.localIdentifier).fetchAll(conn)
}
// Delete unique assets and unlink others
var toDeleteSet = Set(toDelete)
let uniqueIds = toDeleteSet.intersection(uniqueAssetIds)
toDeleteSet.subtract(uniqueIds)
let toUnlink = toDeleteSet
guard !toDeleteSet.isEmpty || !toUnlink.isEmpty else { return }
try await db.write { conn in
if !uniqueIds.isEmpty {
try LocalAsset.delete().where { $0.id.in(Array(uniqueIds)) }.execute(conn)
}
if !toUnlink.isEmpty {
try LocalAlbumAsset.delete()
.where { $0.id.assetId.in(Array(toUnlink)) && $0.id.albumId.eq(deviceAlbum.localIdentifier) }
.execute(conn)
}
}
}
private func syncCloudAlbums(_ albums: [PHAssetCollection]) async throws {
try await withThrowingTaskGroup(of: Void.self) { group in
for album in albums {
group.addTask {
let dbAlbum = try await self.db.read { conn in
try LocalAlbum.all.where { $0.id.eq(album.localIdentifier) }.fetchOne(conn)
}
guard let dbAlbum else { return }
let deviceIds = try self.getAssetIdsForAlbum(albumId: album.localIdentifier)
let dbIds = try await self.db.read { conn in
try LocalAlbumAsset.all
.where { $0.id.albumId.eq(album.localIdentifier) }
.select(\.id.assetId)
.order { $0.id.assetId }
.fetchAll(conn)
}
guard deviceIds != dbIds else { return }
try await self.fullDiffAlbum(dbAlbum: dbAlbum, deviceAlbum: album)
func getAlbums() throws -> [PlatformAlbum] {
var albums: [PlatformAlbum] = []
albumTypes.forEach { type in
let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
for i in 0..<collections.count {
let album = collections.object(at: i)
// Ignore recovered album
if(album.assetCollectionSubtype.rawValue == self.recoveredAlbumSubType) {
continue;
}
}
try await group.waitForAll()
}
}
private func upsertAlbum(_ album: PHAssetCollection, conn: Database) throws {
try LocalAlbum.insert {
LocalAlbum(
id: album.localIdentifier,
backupSelection: .none,
linkedRemoteAlbumId: nil,
marker_: nil,
name: album.localizedTitle ?? "",
isIosSharedAlbum: album.isCloud,
updatedAt: album.updatedAt ?? Date()
)
} onConflict: {
$0.id
} doUpdate: { old, new in
old.name = new.name
old.updatedAt = new.updatedAt
old.isIosSharedAlbum = new.isIosSharedAlbum
old.marker_ = new.marker_
}.execute(conn)
}
private func updateAlbum(_ album: PHAssetCollection, conn: Database) throws {
try LocalAlbum.update { row in
row.name = album.localizedTitle ?? ""
row.updatedAt = album.updatedAt ?? Date()
row.isIosSharedAlbum = album.isCloud
}.where { $0.id.eq(album.localIdentifier) }.execute(conn)
}
private func upsertAsset(_ asset: PHAsset, conn: Database) throws {
guard let assetType = AssetType(rawValue: asset.mediaType.rawValue) else {
throw LocalSyncError.unsupportedAssetType(asset.mediaType.rawValue)
}
let dateStr = Self.dateFormatter.string(from: asset.creationDate ?? Date())
try LocalAsset.insert {
LocalAsset(
id: asset.localIdentifier,
checksum: nil,
createdAt: dateStr,
durationInSeconds: Int64(asset.duration),
height: asset.pixelHeight,
isFavorite: asset.isFavorite,
name: asset.title,
orientation: "0",
type: assetType,
updatedAt: dateStr,
width: asset.pixelWidth
)
} onConflict: {
$0.id
} doUpdate: { old, new in
old.name = new.name
old.type = new.type
old.updatedAt = new.updatedAt
old.width = new.width
old.height = new.height
old.durationInSeconds = new.durationInSeconds
old.isFavorite = new.isFavorite
old.orientation = new.orientation
}.execute(conn)
}
private func linkAsset(_ assetId: String, toAlbum albumId: String, conn: Database) throws {
try LocalAlbumAsset.insert {
LocalAlbumAsset(id: LocalAlbumAsset.ID(assetId: assetId, albumId: albumId), marker_: nil)
} onConflict: {
($0.id.assetId, $0.id.albumId)
}.execute(conn)
}
private func updateAssetAlbumLinks(_ assetAlbums: [String: [String]], conn: Database) throws {
for (assetId, albumIds) in assetAlbums {
// Delete old links not in the new set
try LocalAlbumAsset.delete()
.where { $0.id.assetId.eq(assetId) && !$0.id.albumId.in(albumIds) }
.execute(conn)
// Insert new links
for albumId in albumIds {
try LocalAlbumAsset.insert {
LocalAlbumAsset(id: LocalAlbumAsset.ID(assetId: assetId, albumId: albumId), marker_: nil)
} onConflict: {
($0.id.assetId, $0.id.albumId)
}.execute(conn)
let options = PHFetchOptions()
options.sortDescriptors = [NSSortDescriptor(key: "modificationDate", ascending: false)]
options.includeHiddenAssets = false
let assets = getAssetsFromAlbum(in: album, options: options)
let isCloud = album.assetCollectionSubtype == .albumCloudShared || album.assetCollectionSubtype == .albumMyPhotoStream
var domainAlbum = PlatformAlbum(
id: album.localIdentifier,
name: album.localizedTitle!,
updatedAt: nil,
isCloud: isCloud,
assetCount: Int64(assets.count)
)
if let firstAsset = assets.firstObject {
domainAlbum.updatedAt = firstAsset.modificationDate.map { Int64($0.timeIntervalSince1970) }
}
albums.append(domainAlbum)
}
}
return albums.sorted { $0.id < $1.id }
}
private func fetchAssetsByIds(_ ids: [String]) throws -> [PHAsset] {
let options = PHFetchOptions()
options.includeHiddenAssets = false
let result = photoLibrary.fetchAssets(withLocalIdentifiers: ids, options: options)
var assets: [PHAsset] = []
assets.reserveCapacity(ids.count)
result.enumerateObjects { asset, _, _ in assets.append(asset) }
return assets
}
private func getMediaChanges() throws -> NativeSyncDelta {
func getMediaChanges() throws -> SyncDelta {
guard #available(iOS 16, *) else {
throw LocalSyncError.unsupportedOS
throw PigeonError(code: "UNSUPPORTED_OS", message: "This feature requires iOS 16 or later.", details: nil)
}
guard photoLibrary.isAuthorized else {
throw LocalSyncError.photoAccessDenied
guard PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized else {
throw PigeonError(code: "NO_AUTH", message: "No photo library access", details: nil)
}
guard let storedToken = getChangeToken() else {
throw LocalSyncError.noChangeToken
// No token exists, definitely need a full sync
print("MediaManager::getMediaChanges: No token found")
throw PigeonError(code: "NO_TOKEN", message: "No stored change token", details: nil)
}
let currentToken = photoLibrary.currentChangeToken
guard storedToken != currentToken else {
return NativeSyncDelta(hasChanges: false, updates: [], deletes: [], assetAlbums: [:])
let currentToken = PHPhotoLibrary.shared().currentChangeToken
if storedToken == currentToken {
return SyncDelta(hasChanges: false, updates: [], deletes: [], assetAlbums: [:])
}
let changes = try photoLibrary.fetchPersistentChanges(since: storedToken)
var updatedIds = Set<String>()
var deletedIds = Set<String>()
for change in changes {
guard let details = try? change.changeDetails(for: PHObjectType.asset) else { continue }
updatedIds.formUnion(details.updatedLocalIdentifiers.union(details.insertedLocalIdentifiers))
deletedIds.formUnion(details.deletedLocalIdentifiers)
}
guard !updatedIds.isEmpty || !deletedIds.isEmpty else {
return NativeSyncDelta(hasChanges: false, updates: [], deletes: [], assetAlbums: [:])
}
let updatedIdArray = Array(updatedIds)
let options = PHFetchOptions()
options.includeHiddenAssets = false
let result = photoLibrary.fetchAssets(withLocalIdentifiers: updatedIdArray, options: options)
var updates: [PHAsset] = []
result.enumerateObjects { asset, _, _ in updates.append(asset) }
return NativeSyncDelta(
hasChanges: true,
updates: updates,
deletes: Array(deletedIds),
assetAlbums: buildAssetAlbumsMap(assetIds: updatedIdArray)
)
}
private func buildAssetAlbumsMap(assetIds: [String]) -> [String: [String]] {
guard !assetIds.isEmpty else { return [:] }
var result: [String: [String]] = [:]
let options = PHFetchOptions()
options.predicate = NSPredicate(format: "localIdentifier IN %@", assetIds)
options.includeHiddenAssets = false
for type in SyncConfig.albumTypes {
photoLibrary.fetchAssetCollections(with: type, subtype: .any, options: nil)
.enumerateObjects { album, _, _ in
photoLibrary.fetchAssets(in: album, options: options)
.enumerateObjects { asset, _, _ in
result[asset.localIdentifier, default: []].append(album.localIdentifier)
}
do {
let changes = try PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken)
var updatedAssets: Set<AssetWrapper> = []
var deletedAssets: Set<String> = []
for change in changes {
guard let details = try? change.changeDetails(for: PHObjectType.asset) else { continue }
let updated = details.updatedLocalIdentifiers.union(details.insertedLocalIdentifiers)
deletedAssets.formUnion(details.deletedLocalIdentifiers)
if (updated.isEmpty) { continue }
let options = PHFetchOptions()
options.includeHiddenAssets = false
let result = PHAsset.fetchAssets(withLocalIdentifiers: Array(updated), options: options)
for i in 0..<result.count {
let asset = result.object(at: i)
// Asset wrapper only uses the id for comparison. Multiple change can contain the same asset, skip duplicate changes
let predicate = PlatformAsset(
id: asset.localIdentifier,
name: "",
type: 0,
durationInSeconds: 0,
orientation: 0,
isFavorite: false
)
if (updatedAssets.contains(AssetWrapper(with: predicate))) {
continue
}
let domainAsset = AssetWrapper(with: asset.toPlatformAsset())
updatedAssets.insert(domainAsset)
}
}
let updates = Array(updatedAssets.map { $0.asset })
return SyncDelta(hasChanges: true, updates: updates, deletes: Array(deletedAssets), assetAlbums: buildAssetAlbumsMap(assets: updates))
}
return result
}
private func getAssetIdsForAlbum(albumId: String) throws -> [String] {
guard let album = photoLibrary.fetchAssetCollection(albumId: albumId, options: nil) else { return [] }
private func buildAssetAlbumsMap(assets: Array<PlatformAsset>) -> [String: [String]] {
guard !assets.isEmpty else {
return [:]
}
var albumAssets: [String: [String]] = [:]
for type in albumTypes {
let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
collections.enumerateObjects { (album, _, _) in
let options = PHFetchOptions()
options.predicate = NSPredicate(format: "localIdentifier IN %@", assets.map(\.id))
options.includeHiddenAssets = false
let result = self.getAssetsFromAlbum(in: album, options: options)
result.enumerateObjects { (asset, _, _) in
albumAssets[asset.localIdentifier, default: []].append(album.localIdentifier)
}
}
}
return albumAssets
}
func getAssetIdsForAlbum(albumId: String) throws -> [String] {
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
guard let album = collections.firstObject else {
return []
}
var ids: [String] = []
let options = PHFetchOptions()
options.includeHiddenAssets = false
options.sortDescriptors = [NSSortDescriptor(key: "localIdentifier", ascending: true)]
var ids: [String] = []
photoLibrary.fetchAssets(in: album, options: options).enumerateObjects { asset, _, _ in
let assets = getAssetsFromAlbum(in: album, options: options)
assets.enumerateObjects { (asset, _, _) in
ids.append(asset.localIdentifier)
}
return ids
}
private func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PHAsset] {
guard let album = photoLibrary.fetchAssetCollection(albumId: albumId, options: nil) else { return [] }
func getAssetsCountSince(albumId: String, timestamp: Int64) throws -> Int64 {
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
guard let album = collections.firstObject else {
return 0
}
let date = NSDate(timeIntervalSince1970: TimeInterval(timestamp))
let options = PHFetchOptions()
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
options.includeHiddenAssets = false
let assets = getAssetsFromAlbum(in: album, options: options)
return Int64(assets.count)
}
func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset] {
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
guard let album = collections.firstObject else {
return []
}
let options = PHFetchOptions()
options.includeHiddenAssets = false
if let timestamp = updatedTimeCond {
let date = Date(timeIntervalSince1970: TimeInterval(timestamp))
options.predicate = NSPredicate(
format: "creationDate > %@ OR modificationDate > %@",
date as NSDate,
date as NSDate
)
if(updatedTimeCond != nil) {
let date = NSDate(timeIntervalSince1970: TimeInterval(updatedTimeCond!))
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
}
let result = getAssetsFromAlbum(in: album, options: options)
if(result.count == 0) {
return []
}
var assets: [PlatformAsset] = []
result.enumerateObjects { (asset, _, _) in
assets.append(asset.toPlatformAsset())
}
let result = photoLibrary.fetchAssets(in: album, options: options)
var assets: [PHAsset] = []
result.enumerateObjects { asset, _, _ in assets.append(asset) }
return assets
}
}
func diffSortedArrays<T: Comparable & Hashable>(_ a: [T], _ b: [T]) -> (toAdd: [T], toRemove: [T]) {
var toAdd: [T] = []
var toRemove: [T] = []
var i = 0
var j = 0
while i < a.count && j < b.count {
if a[i] < b[j] {
toRemove.append(a[i])
i += 1
} else if b[j] < a[i] {
toAdd.append(b[j])
j += 1
} else {
i += 1
j += 1
/// Hashes the given assets and delivers results via `completion`.
///
/// Cancels any previously running hash task first, so only one hashing
/// operation is in flight at a time. Assets that cannot be found in the
/// photo library are reported as per-asset errors rather than failing the
/// whole batch. Cancellation at any stage completes with `hashCancelled`.
/// NOTE(review): `allowNetworkAccess` is forwarded to `hashAsset` — presumably
/// it gates fetching iCloud originals; confirm in `hashAsset`.
func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void) {
// Supersede any in-flight hashing task.
if let prevTask = hashTask {
prevTask.cancel()
hashTask = nil
}
hashTask = Task { [weak self] in
// Track ids not returned by the fetch so they can be reported as missing.
var missingAssetIds = Set(assetIds)
var assets = [PHAsset]()
assets.reserveCapacity(assetIds.count)
PHAsset.fetchAssets(withLocalIdentifiers: assetIds, options: nil).enumerateObjects { (asset, _, stop) in
if Task.isCancelled {
stop.pointee = true
return
}
missingAssetIds.remove(asset.localIdentifier)
assets.append(asset)
}
if Task.isCancelled {
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
}
// Hash assets concurrently; each child task yields nil when cancelled.
await withTaskGroup(of: HashResult?.self) { taskGroup in
var results = [HashResult]()
results.reserveCapacity(assets.count)
for asset in assets {
if Task.isCancelled {
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
}
taskGroup.addTask {
guard let self = self else { return nil }
return await self.hashAsset(asset, allowNetworkAccess: allowNetworkAccess)
}
}
for await result in taskGroup {
// A nil result means a child task observed cancellation.
guard let result = result else {
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
}
results.append(result)
}
// Report ids the photo library did not return as individual errors.
for missing in missingAssetIds {
results.append(HashResult(assetId: missing, error: "Asset not found in library", hash: nil))
}
return self?.completeWhenActive(for: completion, with: .success(results))
}
}
}
/// Cancels any in-flight hashing batch and drops the task reference.
func cancelHashing() {
  if let task = hashTask {
    task.cancel()
  }
  hashTask = nil
}
/// Streams the asset's primary resource through SHA-1 and returns the digest
/// base64-encoded. Returns nil when the request is cancelled. `Insecure.SHA1`
/// is intentional: the hash is a dedupe checksum, not a security primitive.
private func hashAsset(_ asset: PHAsset, allowNetworkAccess: Bool) async -> HashResult? {
  // Reference box so the onCancel closure can see the request id that is
  // assigned after the cancellation handler is installed.
  class RequestRef {
    var id: PHAssetResourceDataRequestID?
  }
  let requestRef = RequestRef()
  return await withTaskCancellationHandler(operation: {
    if Task.isCancelled {
      return nil
    }
    guard let resource = asset.getResource() else {
      return HashResult(assetId: asset.localIdentifier, error: "Cannot get asset resource", hash: nil)
    }
    // Re-check: getResource() may have taken a while.
    if Task.isCancelled {
      return nil
    }
    let options = PHAssetResourceRequestOptions()
    options.isNetworkAccessAllowed = allowNetworkAccess
    return await withCheckedContinuation { continuation in
      var hasher = Insecure.SHA1()
      requestRef.id = PHAssetResourceManager.default().requestData(
        for: resource,
        options: options,
        dataReceivedHandler: { data in
          // Incremental hashing: the file never needs to fit in memory.
          hasher.update(data: data)
        },
        completionHandler: { error in
          let result: HashResult? = switch (error) {
          // Cancellation maps to nil so the caller can report "cancelled".
          case let e as PHPhotosError where e.code == .userCancelled: nil
          case let .some(e): HashResult(
            assetId: asset.localIdentifier,
            error: "Failed to hash asset: \(e.localizedDescription)",
            hash: nil
          )
          case .none:
            HashResult(
              assetId: asset.localIdentifier,
              error: nil,
              hash: Data(hasher.finalize()).base64EncodedString()
            )
          }
          continuation.resume(returning: result)
        }
      )
    }
  }, onCancel: {
    // Cancel the underlying Photos data request; its completion handler then
    // fires with .userCancelled and resumes the continuation above.
    guard let requestId = requestRef.id else { return }
    PHAssetResourceManager.default().cancelDataRequest(requestId)
  })
}
/// Fetches the assets of an album. The "Recents" user-library smart album
/// must be queried globally, or not all of its assets are returned.
private func getAssetsFromAlbum(in album: PHAssetCollection, options: PHFetchOptions) -> PHFetchResult<PHAsset> {
  album.assetCollectionSubtype == .smartAlbumUserLibrary
    ? PHAsset.fetchAssets(with: options)
    : PHAsset.fetchAssets(in: album, options: options)
}
toRemove.append(contentsOf: a[i...])
toAdd.append(contentsOf: b[j...])
return (toAdd, toRemove)
}
/// Result of comparing the native photo library against local state.
private struct NativeSyncDelta: Hashable {
  // Whether any difference was detected at all.
  var hasChanges: Bool
  // Assets to upsert — presumably added/modified since the last sync; confirm against the producer.
  var updates: [PHAsset]
  // Local identifiers of assets that no longer exist.
  var deletes: [String]
  // Album id -> contained asset ids (association changes) — TODO confirm key semantics.
  var assetAlbums: [String: [String]]
}
/// Temp table to avoid parameter limit for album changes.
@Table("current_albums")
private struct CurrentAlbum {
  // Album local identifier.
  let id: String
}

View File

@@ -1,6 +1,21 @@
import Photos
extension PHAsset {
/// Maps this `PHAsset` into the platform-channel asset representation.
func toPlatformAsset() -> PlatformAsset {
  // Dates cross the channel as Unix seconds; absent dates stay nil.
  let created = creationDate.map { Int64($0.timeIntervalSince1970) }
  let modified = modificationDate.map { Int64($0.timeIntervalSince1970) }
  return PlatformAsset(
    id: localIdentifier,
    name: title,
    type: Int64(mediaType.rawValue),
    createdAt: created,
    updatedAt: modified,
    width: Int64(pixelWidth),
    height: Int64(pixelHeight),
    durationInSeconds: Int64(duration),
    orientation: 0,
    isFavorite: isFavorite
  )
}
/// Best-available display name: resource filename, then the original
/// filename, then a literal placeholder.
var title: String {
  filename ?? originalFilename ?? "<unknown>"
}
@@ -37,23 +52,6 @@ extension PHAsset {
return nil
}
/// Returns the video resource paired with a Live Photo, preferring the
/// full-size pairing; otherwise the last plain paired-video resource.
func getLivePhotoResource() -> PHAssetResource? {
  let resources = PHAssetResource.assetResources(for: self)
  if let fullSize = resources.first(where: { $0.type == .fullSizePairedVideo }) {
    return fullSize
  }
  return resources.last(where: { $0.type == .pairedVideo })
}
private func isValidResourceType(_ type: PHAssetResourceType) -> Bool {
switch mediaType {
@@ -77,37 +75,3 @@ extension PHAsset {
}
}
}
extension PHAssetCollection {
  /// Fetch options used to look up only the most recently modified asset.
  private static let latestAssetOptions: PHFetchOptions = {
    let options = PHFetchOptions()
    options.includeHiddenAssets = false
    options.sortDescriptors = [NSSortDescriptor(key: "modificationDate", ascending: false)]
    options.fetchLimit = 1
    return options
  }()

  /// Whether this collection is cloud-backed (shared album or My Photo Stream).
  var isCloud: Bool { assetCollectionSubtype == .albumCloudShared || assetCollectionSubtype == .albumMyPhotoStream }

  /// Modification date of the most recently changed asset in the collection,
  /// or nil when the collection is empty.
  var updatedAt: Date? {
    // Reuse the shared fetch helper so the user-library special case lives in
    // exactly one place instead of being duplicated here.
    Self.fetchAssets(in: self, options: Self.latestAssetOptions).firstObject?.modificationDate
  }

  /// Looks up a single collection by its local identifier.
  static func fetchAssetCollection(albumId: String, options: PHFetchOptions? = nil) -> PHAssetCollection? {
    let albums = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: options)
    return albums.firstObject
  }

  /// Fetches the album's assets. The "Recents" user-library smart album must
  /// be queried globally to actually return all assets.
  static func fetchAssets(in album: PHAssetCollection, options: PHFetchOptions) -> PHFetchResult<PHAsset> {
    album.assetCollectionSubtype == .smartAlbumUserLibrary
      ? PHAsset.fetchAssets(with: options)
      : PHAsset.fetchAssets(in: album, options: options)
  }
}

View File

@@ -1,53 +0,0 @@
import Photos
/// Abstraction over PhotosKit library access so callers can be tested against
/// a fake implementation.
protocol PhotoLibraryProvider {
  /// Whether read-write photo library access has been granted.
  var isAuthorized: Bool { get }
  /// Current persistent-change token of the photo library.
  @available(iOS 16, *)
  var currentChangeToken: PHPersistentChangeToken { get }
  /// Returns all albums, optionally sorted — ordering semantics are defined by the implementation.
  func fetchAlbums(sorted: Bool) -> [PHAssetCollection]
  /// Fetches collections of the given type/subtype.
  func fetchAlbums(with type: PHAssetCollectionType, subtype: PHAssetCollectionSubtype, options: PHFetchOptions?) -> PHFetchResult<PHAssetCollection>
  /// Fetches the assets contained in the given album.
  func fetchAssets(in album: PHAssetCollection, options: PHFetchOptions?) -> PHFetchResult<PHAsset>
  /// Fetches assets by their local identifiers.
  func fetchAssets(withIdentifiers ids: [String], options: PHFetchOptions?) -> PHFetchResult<PHAsset>
  /// Fetches library changes recorded since the given token.
  @available(iOS 16, *)
  func fetchPersistentChanges(since token: PHPersistentChangeToken) throws -> PHPersistentChangeFetchResult
}
/// Production `PhotoLibraryProvider` backed directly by PhotosKit.
///
/// NOTE(review): the protocol requires `fetchAlbums(sorted:)`, which is not
/// defined in this struct — confirm it is supplied by an extension elsewhere,
/// otherwise this conformance will not compile.
struct PhotoLibrary: PhotoLibraryProvider {
  // Shared singleton; the initializer is private to enforce it.
  static let shared: PhotoLibrary = .init()
  private init() {}

  func fetchAlbums(with type: PHAssetCollectionType, subtype: PHAssetCollectionSubtype, options: PHFetchOptions?) -> PHFetchResult<PHAssetCollection> {
    PHAssetCollection.fetchAssetCollections(with: type, subtype: subtype, options: options)
  }

  /// Looks up a single collection by local identifier (not a protocol requirement).
  func fetchAssetCollection(albumId: String, options: PHFetchOptions? = nil) -> PHAssetCollection? {
    let albums = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: options)
    return albums.firstObject
  }

  func fetchAssets(in album: PHAssetCollection, options: PHFetchOptions?) -> PHFetchResult<PHAsset> {
    // The "Recents" user library must be fetched globally to return everything.
    album.assetCollectionSubtype == .smartAlbumUserLibrary
      ? PHAsset.fetchAssets(with: options)
      : PHAsset.fetchAssets(in: album, options: options)
  }

  func fetchAssets(withIdentifiers ids: [String], options: PHFetchOptions?) -> PHFetchResult<PHAsset> {
    PHAsset.fetchAssets(withLocalIdentifiers: ids, options: options)
  }

  @available(iOS 16, *)
  func fetchPersistentChanges(since token: PHPersistentChangeToken) throws -> PHPersistentChangeFetchResult {
    try PHPhotoLibrary.shared().fetchPersistentChanges(since: token)
  }

  @available(iOS 16, *)
  var currentChangeToken: PHPersistentChangeToken {
    PHPhotoLibrary.shared().currentChangeToken
  }

  var isAuthorized: Bool {
    PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized
  }
}

View File

@@ -1,208 +0,0 @@
import SQLiteData
/// URLSession delegate for background uploads: accumulates response bodies,
/// reports smoothed transfer progress, and records task success/failure in
/// the upload task table.
class UploadApiDelegate: NSObject, URLSessionDataDelegate, URLSessionTaskDelegate {
  // Static state survives delegate re-creation while a background session lives.
  private static let stateLock = NSLock()
  private static var transferStates: [Int64: NetworkTransferState] = [:]
  private static var responseData: [Int64: Data] = [:]
  private static let jsonDecoder = JSONDecoder()

  private let db: DatabasePool
  private let statusListener: StatusEventListener
  private let progressListener: ProgressEventListener
  // Weak back-references: queues own the session that owns this delegate.
  weak var downloadQueue: DownloadQueue?
  weak var uploadQueue: UploadQueue?

  init(db: DatabasePool, statusListener: StatusEventListener, progressListener: ProgressEventListener) {
    self.db = db
    self.statusListener = statusListener
    self.progressListener = progressListener
  }

  /// Clears all in-memory transfer bookkeeping.
  static func reset() {
    stateLock.withLock {
      transferStates.removeAll()
      responseData.removeAll()
    }
  }

  /// Accumulates response body chunks per task. The task description carries
  /// the database task id.
  func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
    guard let taskIdStr = dataTask.taskDescription,
      let taskId = Int64(taskIdStr)
    else { return }
    Self.stateLock.withLock {
      // Append in place. The previous `if var response = ...` form copied the
      // Data value out of the dictionary and appended to the local copy, so
      // every chunk after the first was silently dropped.
      Self.responseData[taskId, default: Data()].append(data)
    }
  }

  /// Finalizes a task: decodes the server response on 2xx, classifies errors
  /// otherwise, and always nudges both queues afterwards so freed slots refill.
  func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
    Task {
      defer {
        downloadQueue?.startQueueProcessing()
        uploadQueue?.startQueueProcessing()
      }
      guard let taskDescriptionId = task.taskDescription,
        let taskId = Int64(taskDescriptionId)
      else {
        return dPrint("Unexpected: task without session ID completed")
      }
      defer {
        Self.stateLock.withLock { let _ = Self.transferStates.removeValue(forKey: taskId) }
      }
      if let responseData = Self.stateLock.withLock({ Self.responseData.removeValue(forKey: taskId) }),
        let httpResponse = task.response as? HTTPURLResponse
      {
        switch httpResponse.statusCode {
        case 200, 201:
          do {
            let response = try Self.jsonDecoder.decode(UploadSuccessResponse.self, from: responseData)
            return await handleSuccess(taskId: taskId, response: response)
          } catch {
            return await handleFailure(taskId: taskId, code: .invalidResponse)
          }
        case 400..<500:
          // Client errors are permanent; log the body for diagnostics.
          dPrint(
            "Response \(httpResponse.statusCode): \(String(data: responseData, encoding: .utf8) ?? "No response body")"
          )
          return await handleFailure(taskId: taskId, code: .badRequest)
        default:
          // 5xx and friends fall through to transport-error classification.
          break
        }
      }
      guard let urlError = error as? URLError else {
        return await handleFailure(taskId: taskId)
      }
      // iOS 17+: retry from where the upload left off when possible.
      if #available(iOS 17, *), let resumeData = urlError.uploadTaskResumeData {
        return await handleFailure(taskDescriptionId: taskDescriptionId, session: session, resumeData: resumeData)
      }
      let code: UploadErrorCode =
        switch urlError.backgroundTaskCancelledReason {
        case .backgroundUpdatesDisabled: .backgroundUpdatesDisabled
        case .insufficientSystemResources: .outOfResources
        case .userForceQuitApplication: .forceQuit
        default:
          switch urlError.code {
          case .networkConnectionLost, .notConnectedToInternet: .networkError
          case .timedOut: .uploadTimeout
          case .resourceUnavailable, .fileDoesNotExist: .fileNotFound
          default: .unknown
          }
        }
      await handleFailure(taskId: taskId, code: code)
    }
  }

  /// Emits a progress event with an exponentially smoothed transfer speed.
  /// NOTE(review): the state object is mutated after the lock is released —
  /// safe only if the session's delegate callbacks are serial; confirm the
  /// session is configured with a serial delegate queue.
  func urlSession(
    _ session: URLSession,
    task: URLSessionTask,
    didSendBodyData bytesSent: Int64,
    totalBytesSent: Int64,
    totalBytesExpectedToSend: Int64
  ) {
    guard let sessionTaskId = task.taskDescription, let taskId = Int64(sessionTaskId) else { return }
    let currentTime = Date()
    let state = Self.stateLock.withLock {
      if let existing = Self.transferStates[taskId] {
        return existing
      }
      let new = NetworkTransferState(
        lastUpdateTime: currentTime,
        totalBytesTransferred: totalBytesSent,
        currentSpeed: nil
      )
      Self.transferStates[taskId] = new
      return new
    }
    let timeDelta = currentTime.timeIntervalSince(state.lastUpdateTime)
    guard timeDelta > 0 else { return }
    let bytesDelta = totalBytesSent - state.totalBytesTransferred
    let instantSpeed = Double(bytesDelta) / timeDelta
    // Exponential moving average keeps the displayed speed stable.
    let currentSpeed =
      if let previousSpeed = state.currentSpeed {
        TaskConfig.transferSpeedAlpha * instantSpeed + (1 - TaskConfig.transferSpeedAlpha) * previousSpeed
      } else {
        instantSpeed
      }
    state.currentSpeed = currentSpeed
    state.lastUpdateTime = currentTime
    state.totalBytesTransferred = totalBytesSent
    self.progressListener.onTaskProgress(
      UploadApiTaskProgress(
        id: sessionTaskId,
        progress: Double(totalBytesSent) / Double(totalBytesExpectedToSend),
        speed: currentSpeed
      )
    )
  }

  /// Marks the task complete; for the photo half of a Live Photo, enqueues a
  /// follow-up task that uploads the paired video referencing the new server id.
  private func handleSuccess(taskId: Int64, response: UploadSuccessResponse) async {
    dPrint("Upload succeeded for task \(taskId), server ID: \(response.id)")
    do {
      try await db.write { conn in
        let task = try UploadTask.update { $0.status = .uploadComplete }.where({ $0.id.eq(taskId) })
          .returning(\.self).fetchOne(conn)
        // Only spawn the video task once, for the primary Live Photo upload.
        guard let task, let isLivePhoto = task.isLivePhoto, isLivePhoto, task.livePhotoVideoId == nil else { return }
        try UploadTask.insert {
          UploadTask.Draft(
            attempts: 0,
            createdAt: Date(),
            filePath: nil,
            isLivePhoto: true,
            lastError: nil,
            livePhotoVideoId: response.id,
            localId: task.localId,
            method: .multipart,
            priority: 0.7,
            retryAfter: nil,
            status: .downloadPending,
          )
        }.execute(conn)
      }
      dPrint("Updated upload success status for session task \(taskId)")
    } catch {
      dPrint(
        "Failed to update upload success status for session task \(taskId): \(error.localizedDescription)"
      )
    }
  }

  /// Records a failure; `retryOrFail` decides between retry scheduling and a
  /// terminal failed state.
  private func handleFailure(taskId: Int64, code: UploadErrorCode = .unknown) async {
    dPrint("Upload failed for task \(taskId) with code \(code)")
    try? await db.write { conn in
      try UploadTask.retryOrFail(code: code, status: .uploadFailed).where { $0.id.eq(taskId) }
        .execute(conn)
    }
  }

  /// Re-submits an interrupted upload from its resume data (iOS 17+).
  @available(iOS 17, *)
  private func handleFailure(taskDescriptionId: String, session: URLSession, resumeData: Data) async {
    dPrint("Resuming upload for task \(taskDescriptionId)")
    let resumeTask = session.uploadTask(withResumeData: resumeData)
    resumeTask.taskDescription = taskDescriptionId
    resumeTask.resume()
  }

  /// Mutable per-task bookkeeping for speed smoothing (reference type so the
  /// dictionary entry and local handle share state).
  private class NetworkTransferState {
    var lastUpdateTime: Date
    var totalBytesTransferred: Int64
    var currentSpeed: Double?

    init(lastUpdateTime: Date, totalBytesTransferred: Int64, currentSpeed: Double?) {
      self.lastUpdateTime = lastUpdateTime
      self.totalBytesTransferred = totalBytesTransferred
      self.currentSpeed = currentSpeed
    }
  }
}

View File

@@ -1,351 +0,0 @@
import CryptoKit
import Photos
import SQLiteData
/// Stages Photos assets for upload: copies asset resources to disk wrapped in
/// the multipart envelope, computes missing checksums, dedupes against remote
/// assets, and hands finished files to the UploadQueue.
class DownloadQueue {
  private static let resourceManager = PHAssetResourceManager.default()
  // Single in-flight processing pass; concurrent triggers coalesce.
  private static var queueProcessingTask: Task<Void, Never>?
  private static var queueProcessingLock = NSLock()

  private let db: DatabasePool
  private let uploadQueue: UploadQueue
  private let statusListener: StatusEventListener
  private let progressListener: ProgressEventListener

  /// Typed failure thrown out of `download` so the caller records the precise
  /// error code exactly once.
  private struct DownloadFailure: Error {
    let code: UploadErrorCode
  }

  init(
    db: DatabasePool,
    uploadQueue: UploadQueue,
    statusListener: StatusEventListener,
    progressListener: ProgressEventListener
  ) {
    self.db = db
    self.uploadQueue = uploadQueue
    self.statusListener = statusListener
    self.progressListener = progressListener
    // Resume processing as soon as connectivity returns.
    NotificationCenter.default.addObserver(forName: .networkDidConnect, object: nil, queue: nil) { [weak self] _ in
      dPrint("Network connected")
      self?.startQueueProcessing()
    }
  }

  /// Creates upload tasks for the given local asset ids (bounded by the
  /// available slot budget) and kicks off queue processing.
  func enqueueAssets(localIds: [String]) async throws {
    guard !localIds.isEmpty else { return dPrint("No assets to enqueue") }
    defer { startQueueProcessing() }
    let candidates = try await db.read { conn in
      return try LocalAsset.all
        .where { asset in asset.id.in(localIds) }
        .select { LocalAssetCandidate.Columns(id: $0.id, type: $0.type) }
        .limit { _ in UploadTaskStat.availableSlots }
        .fetchAll(conn)
    }
    guard !candidates.isEmpty else { return dPrint("No candidates to enqueue") }
    try await db.write { conn in
      var draft = UploadTask.Draft(
        attempts: 0,
        createdAt: Date(),
        filePath: nil,
        isLivePhoto: nil,
        lastError: nil,
        livePhotoVideoId: nil,
        localId: "",
        method: .multipart,
        priority: 0.5,
        retryAfter: nil,
        status: .downloadPending,
      )
      for candidate in candidates {
        draft.localId = candidate.id
        // Photos are prioritized ahead of videos.
        draft.priority = candidate.type == .image ? 0.9 : 0.8
        try UploadTask.insert {
          draft
        } onConflict: {
          ($0.localId, $0.livePhotoVideoId)
        }.execute(conn)
      }
    }
    dPrint("Enqueued \(candidates.count) assets for upload")
  }

  /// Starts a processing pass unless one is already running.
  func startQueueProcessing() {
    dPrint("Starting download queue processing")
    Self.queueProcessingLock.withLock {
      guard Self.queueProcessingTask == nil else { return }
      Self.queueProcessingTask = Task {
        await startDownloads()
        Self.queueProcessingLock.withLock { Self.queueProcessingTask = nil }
      }
    }
  }

  /// Selects eligible pending tasks and downloads them with bounded concurrency.
  private func startDownloads() async {
    dPrint("Processing download queue")
    guard NetworkMonitor.shared.isConnected else {
      return dPrint("Download queue paused: network disconnected")
    }
    do {
      let tasks: [LocalAssetDownloadData] = try await db.read({ conn in
        guard let backupEnabled = try Store.get(conn, StoreKey.enableBackup), backupEnabled else { return [] }
        // NOTE(review): this filter requires a non-nil checksum, yet the
        // in-download hashing path below only runs when the checksum is nil —
        // that path looks unreachable under this filter; confirm intent.
        return try UploadTask.join(LocalAsset.all) { task, asset in task.localId.eq(asset.id) }
          .where { task, asset in
            asset.checksum.isNot(nil) && task.status.eq(TaskStatus.downloadPending)
              && task.attempts < TaskConfig.maxRetries
              && (task.retryAfter.is(nil) || task.retryAfter.unwrapped <= Date().unixTime)
              // Permanent per-asset failures are never retried automatically.
              && (task.lastError.is(nil)
                || !task.lastError.unwrapped.in([
                  UploadErrorCode.assetNotFound, UploadErrorCode.resourceNotFound, UploadErrorCode.invalidResource,
                ]))
          }
          .select { task, asset in
            LocalAssetDownloadData.Columns(
              checksum: asset.checksum,
              createdAt: asset.createdAt,
              livePhotoVideoId: task.livePhotoVideoId,
              localId: asset.id,
              taskId: task.id,
              updatedAt: asset.updatedAt
            )
          }
          .order { task, asset in (task.priority.desc(), task.createdAt) }
          .limit { _, _ in UploadTaskStat.availableDownloadSlots }
          .fetchAll(conn)
      })
      if tasks.isEmpty { return dPrint("No download tasks to process") }
      // Sliding window: keep at most maxActiveDownloads children in flight.
      try await withThrowingTaskGroup(of: Void.self) { group in
        var iterator = tasks.makeIterator()
        for _ in 0..<min(TaskConfig.maxActiveDownloads, tasks.count) {
          if let task = iterator.next() {
            group.addTask { await self.downloadAndQueue(task) }
          }
        }
        while try await group.next() != nil {
          if let task = iterator.next() {
            group.addTask { await self.downloadAndQueue(task) }
          }
        }
      }
    } catch {
      dPrint("Download queue error: \(error)")
    }
  }

  /// Downloads one asset resource to the staging directory, computes its
  /// checksum when missing, dedupes against remote assets, and advances the
  /// task toward upload.
  private func downloadAndQueue(_ task: LocalAssetDownloadData) async {
    defer { startQueueProcessing() }
    dPrint("Starting download for task \(task.taskId)")
    guard let asset = PHAsset.fetchAssets(withLocalIdentifiers: [task.localId], options: nil).firstObject
    else {
      dPrint("Asset not found")
      return handleFailure(task: task, code: .assetNotFound)
    }
    let isLivePhoto = asset.mediaSubtypes.contains(.photoLive)
    // A set livePhotoVideoId means this task uploads the paired motion video.
    let isMotion = isLivePhoto && task.livePhotoVideoId != nil
    guard let resource = isMotion ? asset.getLivePhotoResource() : asset.getResource() else {
      dPrint("Resource not found")
      return handleFailure(task: task, code: .resourceNotFound)
    }
    guard let deviceId = (try? await db.read { conn in try Store.get(conn, StoreKey.deviceId) }) else {
      dPrint("Device ID not found")
      return handleFailure(task: task, code: .noDeviceId)
    }
    let fileDir = TaskConfig.originalsDir
    let fileName = "\(resource.assetLocalIdentifier.replacingOccurrences(of: "/", with: "_"))_\(resource.type.rawValue)"
    let filePath = fileDir.appendingPathComponent(fileName)
    do {
      try FileManager.default.createDirectory(
        at: fileDir,
        withIntermediateDirectories: true,
        attributes: nil
      )
    } catch {
      dPrint("Failed to create directory for download task \(task.taskId): \(error)")
      return handleFailure(task: task, code: .writeFailed, filePath: filePath)
    }
    do {
      try await db.write { conn in
        try UploadTask.update {
          $0.status = .downloadQueued
          $0.isLivePhoto = isLivePhoto
          $0.filePath = filePath
        }.where { $0.id.eq(task.taskId) }.execute(conn)
      }
    } catch {
      return dPrint("Failed to set file path for download task \(task.taskId): \(error)")
    }
    statusListener.onTaskStatus(
      UploadApiTaskStatus(id: String(task.taskId), filename: filePath.path, status: .downloadQueued)
    )
    do {
      let hash = try await download(task: task, asset: asset, resource: resource, to: filePath, deviceId: deviceId)
      let status = try await db.write { conn in
        if let hash { try LocalAsset.update { $0.checksum = hash }.where { $0.id.eq(task.localId) }.execute(conn) }
        // Skip the upload when the server already has an asset with this checksum.
        let status =
          if let hash, try RemoteAsset.select(\.rowid).where({ $0.checksum.eq(hash) }).fetchOne(conn) != nil {
            TaskStatus.uploadSkipped
          } else {
            TaskStatus.uploadPending
          }
        // Persist the computed status. Previously this hardcoded .uploadPending,
        // which re-uploaded assets the dedupe check had already skipped.
        try UploadTask.update { $0.status = status }.where { $0.id.eq(task.taskId) }.execute(conn)
        return status
      }
      statusListener.onTaskStatus(
        UploadApiTaskStatus(
          id: String(task.taskId),
          filename: filePath.path,
          status: UploadApiStatus(rawValue: status.rawValue)!
        )
      )
      uploadQueue.startQueueProcessing()
    } catch {
      dPrint("Download failed for task \(task.taskId): \(error)")
      // Failures surfaced by `download` carry their own code; anything else
      // (e.g. footer write errors) counts as a write failure.
      let code = (error as? DownloadFailure)?.code ?? .writeFailed
      handleFailure(task: task, code: code, filePath: filePath)
    }
  }

  /// Streams the resource into `filePath` wrapped in the multipart envelope,
  /// hashing on the fly when the asset has no checksum yet. Returns the
  /// base64 SHA-1, or nil when hashing was not needed. Throws `DownloadFailure`
  /// (or write errors) on failure — the caller records the failure; this
  /// function no longer records it itself, which previously also leaked the
  /// continuation (it was never resumed on error, hanging the queue slot).
  func download(
    task: LocalAssetDownloadData,
    asset: PHAsset,
    resource: PHAssetResource,
    to filePath: URL,
    deviceId: String
  ) async throws
    -> String?
  {
    dPrint("Downloading asset resource \(resource.assetLocalIdentifier) of type \(resource.type.rawValue)")
    let options = PHAssetResourceRequestOptions()
    options.isNetworkAccessAllowed = true
    let (header, footer) = AssetData(
      deviceAssetId: task.localId,
      deviceId: deviceId,
      fileCreatedAt: task.createdAt,
      fileModifiedAt: task.updatedAt,
      fileName: resource.originalFilename,
      isFavorite: asset.isFavorite,
      livePhotoVideoId: nil
    ).multipart()
    guard let fileHandle = try? FileHandle.createOrOverwrite(atPath: filePath.path) else {
      dPrint("Failed to open file handle for download task \(task.taskId), path: \(filePath.path)")
      throw UploadError.fileCreationFailed
    }
    try fileHandle.write(contentsOf: header)
    // Box so the cancellation/timeout closures can reach the request id.
    class RequestRef {
      var id: PHAssetResourceDataRequestID?
      var didStall = false
    }
    var lastProgressTime = Date()
    nonisolated(unsafe) let progressListener = self.progressListener
    let taskIdStr = String(task.taskId)
    options.progressHandler = { progress in
      lastProgressTime = Date()
      progressListener.onTaskProgress(UploadApiTaskProgress(id: taskIdStr, progress: progress))
    }
    let request = RequestRef()
    // Watchdog: cancel the Photos request when no progress arrives in time;
    // the resulting .userCancelled is then reported as a stall.
    let timeoutTask = Task {
      while !Task.isCancelled {
        try? await Task.sleep(nanoseconds: TaskConfig.downloadCheckIntervalNs)
        request.didStall = Date().timeIntervalSince(lastProgressTime) > TaskConfig.downloadTimeoutS
        if request.didStall {
          if let requestId = request.id {
            Self.resourceManager.cancelDataRequest(requestId)
          }
          break
        }
      }
    }
    return try await withTaskCancellationHandler {
      try await withCheckedThrowingContinuation { continuation in
        // Hash only when the asset has no checksum and this is the primary resource.
        var hasher = task.checksum == nil && task.livePhotoVideoId == nil ? Insecure.SHA1() : nil
        request.id = Self.resourceManager.requestData(
          for: resource,
          options: options,
          dataReceivedHandler: { data in
            guard let requestId = request.id else { return }
            do {
              hasher?.update(data: data)
              try fileHandle.write(contentsOf: data)
            } catch {
              // Disk write failed: abort the Photos request.
              request.id = nil
              Self.resourceManager.cancelDataRequest(requestId)
            }
          },
          completionHandler: { error in
            timeoutTask.cancel()
            switch error {
            case let e as NSError where e.domain == "CloudPhotoLibraryErrorDomain":
              dPrint("iCloud error during download: \(e)")
              let code: UploadErrorCode =
                switch e.code {
                case 1005: .iCloudRateLimit
                case 81: .iCloudThrottled
                default: .photosUnknownError
                }
              continuation.resume(throwing: DownloadFailure(code: code))
            case let e as PHPhotosError:
              dPrint("Photos error during download: \(e)")
              let code: UploadErrorCode =
                switch e.code {
                case .notEnoughSpace: .notEnoughSpace
                case .missingResource: .resourceNotFound
                case .networkError: .networkError
                case .internalError: .photosInternalError
                case .invalidResource: .invalidResource
                case .operationInterrupted: .interrupted
                case .userCancelled where request.didStall: .downloadStalled
                case .userCancelled: .cancelled
                default: .photosUnknownError
                }
              continuation.resume(throwing: DownloadFailure(code: code))
            case .some:
              dPrint("Unknown error during download: \(String(describing: error))")
              continuation.resume(throwing: DownloadFailure(code: .unknown))
            case .none:
              dPrint("Download completed for task \(task.taskId)")
              do {
                try fileHandle.write(contentsOf: footer)
                continuation.resume(returning: hasher.map { hasher in Data(hasher.finalize()).base64EncodedString() })
              } catch {
                try? FileManager.default.removeItem(at: filePath)
                continuation.resume(throwing: error)
              }
            }
          }
        )
      }
    } onCancel: {
      if let requestId = request.id {
        Self.resourceManager.cancelDataRequest(requestId)
      }
    }
  }

  /// Records a failure (retry or terminal) and deletes any partial file.
  private func handleFailure(task: LocalAssetDownloadData, code: UploadErrorCode, filePath: URL? = nil) {
    dPrint("Handling failure for task \(task.taskId) with code \(code.rawValue)")
    do {
      if let filePath {
        try? FileManager.default.removeItem(at: filePath)
      }
      try db.write { conn in
        try UploadTask.retryOrFail(code: code, status: .downloadFailed).where { $0.id.eq(task.taskId) }.execute(conn)
      }
    } catch {
      dPrint("Failed to update download failure status for task \(task.taskId): \(error)")
    }
  }
}

View File

@@ -1,39 +0,0 @@
/// Streams per-task status events to Flutter.
class StatusEventListener: StreamStatusStreamHandler {
  var eventSink: PigeonEventSink<UploadApiTaskStatus>?

  override func onListen(withArguments arguments: Any?, sink: PigeonEventSink<UploadApiTaskStatus>) {
    eventSink = sink
  }

  /// Emits a status event. Events originate from background tasks, so hop to
  /// the main queue before touching the event sink — matching how
  /// ProgressEventListener already delivers its events.
  func onTaskStatus(_ event: UploadApiTaskStatus) {
    DispatchQueue.main.async {
      self.eventSink?.success(event)
    }
  }

  /// Closes the stream and drops the sink, also on the main queue.
  func onEventsDone() {
    DispatchQueue.main.async {
      self.eventSink?.endOfStream()
      self.eventSink = nil
    }
  }
}
/// Streams per-task progress events to Flutter on the main queue.
class ProgressEventListener: StreamProgressStreamHandler {
  var eventSink: PigeonEventSink<UploadApiTaskProgress>?

  override func onListen(withArguments arguments: Any?, sink: PigeonEventSink<UploadApiTaskProgress>) {
    eventSink = sink
  }

  /// Forwards a progress event; no-op when nobody is listening.
  func onTaskProgress(_ event: UploadApiTaskProgress) {
    guard let sink = eventSink else { return }
    DispatchQueue.main.async { sink.success(event) }
  }

  /// Ends the stream and releases the sink.
  func onEventsDone() {
    DispatchQueue.main.async { [self] in
      eventSink?.endOfStream()
      eventSink = nil
    }
  }
}

View File

@@ -1,22 +0,0 @@
import Network
/// Singleton wrapper around NWPathMonitor. Tracks connectivity and cost, and
/// posts `.networkDidConnect` whenever the path transitions to connected.
class NetworkMonitor {
  static let shared = NetworkMonitor()

  private let monitor = NWPathMonitor()
  private(set) var isConnected = false
  private(set) var isExpensive = false

  private init() {
    monitor.pathUpdateHandler = { [weak self] path in
      guard let self else { return }
      let connectedNow = path.status == .satisfied
      // Detect the offline -> online edge before overwriting the flag.
      let cameOnline = !self.isConnected && connectedNow
      self.isConnected = connectedNow
      self.isExpensive = path.isExpensive
      if cameOnline {
        NotificationCenter.default.post(name: .networkDidConnect, object: nil)
      }
    }
    monitor.start(queue: .global(qos: .utility))
  }
}

View File

@@ -1,221 +0,0 @@
import SQLiteData
import StructuredFieldValues
class UploadQueue {
private static let structuredEncoder = StructuredFieldValueEncoder()
private static var queueProcessingTask: Task<Void, Never>?
private static var queueProcessingLock = NSLock()
private let db: DatabasePool
private let cellularSession: URLSession
private let wifiOnlySession: URLSession
private let statusListener: StatusEventListener
init(db: DatabasePool, cellularSession: URLSession, wifiOnlySession: URLSession, statusListener: StatusEventListener)
{
self.db = db
self.cellularSession = cellularSession
self.wifiOnlySession = wifiOnlySession
self.statusListener = statusListener
}
func enqueueFiles(paths: [String]) async throws {
guard !paths.isEmpty else { return dPrint("No paths to enqueue") }
guard let deviceId = (try? await db.read { conn in try Store.get(conn, StoreKey.deviceId) }) else {
throw StoreError.notFound
}
defer { startQueueProcessing() }
try await withThrowingTaskGroup(of: Void.self, returning: Void.self) { group in
let date = Date()
try FileManager.default.createDirectory(
at: TaskConfig.originalsDir,
withIntermediateDirectories: true,
attributes: nil
)
for path in paths {
group.addTask {
let inputURL = URL(fileURLWithPath: path, isDirectory: false)
let outputURL = TaskConfig.originalsDir.appendingPathComponent(UUID().uuidString)
let resources = try inputURL.resourceValues(forKeys: [.creationDateKey, .contentModificationDateKey])
let formatter = ISO8601DateFormatter()
let (header, footer) = AssetData(
deviceAssetId: "",
deviceId: deviceId,
fileCreatedAt: formatter.string(from: resources.creationDate ?? date),
fileModifiedAt: formatter.string(from: resources.contentModificationDate ?? date),
fileName: resources.name ?? inputURL.lastPathComponent,
isFavorite: false,
livePhotoVideoId: nil
).multipart()
do {
let writeHandle = try FileHandle.createOrOverwrite(atPath: outputURL.path)
try writeHandle.write(contentsOf: header)
let readHandle = try FileHandle(forReadingFrom: inputURL)
let bufferSize = 1024 * 1024
while true {
let data = try readHandle.read(upToCount: bufferSize)
guard let data = data, !data.isEmpty else { break }
try writeHandle.write(contentsOf: data)
}
try writeHandle.write(contentsOf: footer)
} catch {
try? FileManager.default.removeItem(at: outputURL)
throw error
}
}
}
try await group.waitForAll()
}
try await db.write { conn in
var draft = UploadTask.Draft(
attempts: 0,
createdAt: Date(),
filePath: nil,
isLivePhoto: nil,
lastError: nil,
livePhotoVideoId: nil,
localId: "",
method: .multipart,
priority: 0.5,
retryAfter: nil,
status: .downloadPending,
)
for path in paths {
draft.filePath = URL(fileURLWithPath: path, isDirectory: false)
try UploadTask.insert { draft }.execute(conn)
}
}
dPrint("Enqueued \(paths.count) assets for upload")
}
func startQueueProcessing() {
dPrint("Starting upload queue processing")
Self.queueProcessingLock.withLock {
guard Self.queueProcessingTask == nil else { return }
Self.queueProcessingTask = Task {
await startUploads()
Self.queueProcessingLock.withLock { Self.queueProcessingTask = nil }
}
}
}
private func startUploads() async {
dPrint("Processing download queue")
guard NetworkMonitor.shared.isConnected,
let backupEnabled = try? await db.read({ conn in try Store.get(conn, StoreKey.enableBackup) }),
backupEnabled
else { return dPrint("Download queue paused: network disconnected or backup disabled") }
do {
let tasks: [LocalAssetUploadData] = try await db.read({ conn in
guard let backupEnabled = try Store.get(conn, StoreKey.enableBackup), backupEnabled else { return [] }
return try UploadTask.join(LocalAsset.all) { task, asset in task.localId.eq(asset.id) }
.where { task, asset in
asset.checksum.isNot(nil) && task.status.eq(TaskStatus.uploadPending)
&& task.attempts < TaskConfig.maxRetries
&& task.filePath.isNot(nil)
}
.select { task, asset in
LocalAssetUploadData.Columns(
filePath: task.filePath.unwrapped,
priority: task.priority,
taskId: task.id,
type: asset.type
)
}
.limit { task, _ in UploadTaskStat.availableUploadSlots }
.order { task, asset in (task.priority.desc(), task.createdAt) }
.fetchAll(conn)
})
if tasks.isEmpty { return dPrint("No upload tasks to process") }
await withTaskGroup(of: Void.self) { group in
for task in tasks {
group.addTask { await self.startUpload(multipart: task) }
}
}
} catch {
dPrint("Upload queue error: \(error)")
}
}
private func startUpload(multipart task: LocalAssetUploadData) async {
dPrint("Uploading asset resource at \(task.filePath) of task \(task.taskId)")
defer { startQueueProcessing() }
let (url, accessToken, session): (URL, String, URLSession)
do {
(url, accessToken, session) = try await db.read { conn in
guard let url = try Store.get(conn, StoreKey.serverEndpoint),
let accessToken = try Store.get(conn, StoreKey.accessToken)
else {
throw StoreError.notFound
}
let session =
switch task.type {
case .image:
(try? Store.get(conn, StoreKey.useWifiForUploadPhotos)) ?? false ? cellularSession : wifiOnlySession
case .video:
(try? Store.get(conn, StoreKey.useWifiForUploadVideos)) ?? false ? cellularSession : wifiOnlySession
default: wifiOnlySession
}
return (url, accessToken, session)
}
} catch {
dPrint("Upload failed for \(task.taskId), could not retrieve server URL or access token: \(error)")
return handleFailure(task: task, code: .noServerUrl)
}
var request = URLRequest(url: url.appendingPathComponent("/assets"))
request.httpMethod = "POST"
request.setValue(accessToken, forHTTPHeaderField: UploadHeaders.userToken.rawValue)
request.setValue(AssetData.contentType, forHTTPHeaderField: "Content-Type")
let sessionTask = session.uploadTask(with: request, fromFile: task.filePath)
sessionTask.taskDescription = String(task.taskId)
sessionTask.priority = task.priority
do {
try? FileManager.default.removeItem(at: task.filePath) // upload task already copied the file
try await db.write { conn in
try UploadTask.update { row in
row.status = .uploadQueued
row.filePath = nil
}
.where { $0.id.eq(task.taskId) }
.execute(conn)
}
statusListener.onTaskStatus(
UploadApiTaskStatus(
id: String(task.taskId),
filename: task.filePath.lastPathComponent,
status: .uploadQueued,
)
)
sessionTask.resume()
dPrint("Upload started for task \(task.taskId) using \(session == wifiOnlySession ? "WiFi" : "Cellular") session")
} catch {
dPrint("Upload failed for \(task.taskId), could not update queue status: \(error.localizedDescription)")
}
}
/// Records a failure for the given task: the `retryOrFail` statement either
/// schedules a retry or marks the row `.uploadFailed`, per the retry policy.
/// A DB error here is logged and swallowed — the row keeps its old state.
private func handleFailure(task: LocalAssetUploadData, code: UploadErrorCode) {
  let statement = UploadTask.retryOrFail(code: code, status: .uploadFailed)
  do {
    try db.write { conn in
      try statement.where { $0.id.eq(task.taskId) }.execute(conn)
    }
  } catch {
    dPrint("Failed to update upload failure status for task \(task.taskId): \(error)")
  }
}
}

View File

@@ -1,463 +0,0 @@
// Autogenerated from Pigeon (v26.0.2), do not edit directly.
// See also: https://pub.dev/packages/pigeon
import Foundation
#if os(iOS)
import Flutter
#elseif os(macOS)
import FlutterMacOS
#else
#error("Unsupported platform.")
#endif
/// Wraps a successful return value in Pigeon's single-element list envelope.
private func wrapResult(_ result: Any?) -> [Any?] {
  [result]
}
/// Converts a thrown error into Pigeon's three-element
/// `[code, message, details]` envelope. Pigeon/Flutter errors keep their
/// own fields; anything else is stringified with a captured stack trace.
private func wrapError(_ error: Any) -> [Any?] {
  switch error {
  case let pigeonError as PigeonError:
    return [
      pigeonError.code,
      pigeonError.message,
      pigeonError.details,
    ]
  case let flutterError as FlutterError:
    return [
      flutterError.code,
      flutterError.message,
      flutterError.details,
    ]
  default:
    return [
      "\(error)",
      "\(type(of: error))",
      "Stacktrace: \(Thread.callStackSymbols)",
    ]
  }
}
/// Whether the decoded value represents "no value": Swift `nil` or Obj-C `NSNull`.
private func isNullish(_ value: Any?) -> Bool {
  value == nil || value is NSNull
}
/// Maps `NSNull` to `nil`; otherwise force-casts the value to the expected
/// optional type (traps on a type mismatch, as generated Pigeon code does).
private func nilOrValue<T>(_ value: Any?) -> T? {
  value is NSNull ? nil : value as! T?
}
/// Deep structural equality used by the generated Pigeon data classes.
///
/// Normalizes `NSNull` to `nil`, compares hashable leaves directly, and
/// recurses element-wise into arrays and dictionaries.
func deepEqualsUploadTask(_ lhs: Any?, _ rhs: Any?) -> Bool {
  // Normalize NSNull to nil on both sides before comparing.
  let cleanLhs = nilOrValue(lhs) as Any?
  let cleanRhs = nilOrValue(rhs) as Any?
  switch (cleanLhs, cleanRhs) {
  case (nil, nil):
    return true
  case (nil, _), (_, nil):
    return false
  case is (Void, Void):
    return true
  case let (cleanLhsHashable, cleanRhsHashable) as (AnyHashable, AnyHashable):
    return cleanLhsHashable == cleanRhsHashable
  case let (cleanLhsArray, cleanRhsArray) as ([Any?], [Any?]):
    guard cleanLhsArray.count == cleanRhsArray.count else { return false }
    for (index, element) in cleanLhsArray.enumerated() {
      if !deepEqualsUploadTask(element, cleanRhsArray[index]) {
        return false
      }
    }
    return true
  case let (cleanLhsDictionary, cleanRhsDictionary) as ([AnyHashable: Any?], [AnyHashable: Any?]):
    guard cleanLhsDictionary.count == cleanRhsDictionary.count else { return false }
    for (key, cleanLhsValue) in cleanLhsDictionary {
      guard cleanRhsDictionary.index(forKey: key) != nil else { return false }
      if !deepEqualsUploadTask(cleanLhsValue, cleanRhsDictionary[key]!) {
        return false
      }
    }
    return true
  default:
    // Any other type shouldn't be able to be used with pigeon. File an issue if you find this to be untrue.
    return false
  }
}
/// Deep hash companion to `deepEqualsUploadTask`, used by the generated
/// Pigeon data classes' `hash(into:)`.
func deepHashUploadTask(value: Any?, hasher: inout Hasher) {
  // Containers recurse into their elements.
  if let valueList = value as? [AnyHashable] {
    for item in valueList { deepHashUploadTask(value: item, hasher: &hasher) }
    return
  }
  if let valueDict = value as? [AnyHashable: AnyHashable] {
    for key in valueDict.keys {
      hasher.combine(key)
      deepHashUploadTask(value: valueDict[key]!, hasher: &hasher)
    }
    return
  }
  // Leaves combine both their hashValue (when hashable) AND their string
  // description; this double-combine is Pigeon's generated behavior, not a bug.
  if let hashableValue = value as? AnyHashable {
    hasher.combine(hashableValue.hashValue)
  }
  return hasher.combine(String(describing: value))
}
/// Error codes reported to Flutter for upload/download task failures.
///
/// Raw values are part of the Pigeon wire format — do not renumber or reorder.
enum UploadApiErrorCode: Int {
  case unknown = 0
  case assetNotFound = 1
  case fileNotFound = 2
  case resourceNotFound = 3
  case invalidResource = 4
  case encodingFailed = 5
  case writeFailed = 6
  case notEnoughSpace = 7
  case networkError = 8
  case photosInternalError = 9
  case photosUnknownError = 10
  case noServerUrl = 11
  case noDeviceId = 12
  case noAccessToken = 13
  case interrupted = 14
  case cancelled = 15
  case downloadStalled = 16
  case forceQuit = 17
  case outOfResources = 18
  case backgroundUpdatesDisabled = 19
  case uploadTimeout = 20
  case iCloudRateLimit = 21
  case iCloudThrottled = 22
}
/// Lifecycle states a task moves through: download (staging the asset from
/// the Photos library) then upload (sending it to the server).
///
/// Raw values are part of the Pigeon wire format — do not renumber.
enum UploadApiStatus: Int {
  case downloadPending = 0
  case downloadQueued = 1
  case downloadFailed = 2
  case uploadPending = 3
  case uploadQueued = 4
  case uploadFailed = 5
  case uploadComplete = 6
  case uploadSkipped = 7
}
/// Generated class from Pigeon that represents data sent in messages.
///
/// Carries a task's lifecycle status (plus failure details) across the
/// platform channel. The positional order in `toList`/`fromList` IS the
/// wire format and must stay in sync with the Dart side.
struct UploadApiTaskStatus: Hashable {
  var id: String
  var filename: String
  var status: UploadApiStatus
  var errorCode: UploadApiErrorCode? = nil
  var httpStatusCode: Int64? = nil
  // swift-format-ignore: AlwaysUseLowerCamelCase
  /// Decodes an instance from its positional-list wire form.
  static func fromList(_ pigeonVar_list: [Any?]) -> UploadApiTaskStatus? {
    let id = pigeonVar_list[0] as! String
    let filename = pigeonVar_list[1] as! String
    let status = pigeonVar_list[2] as! UploadApiStatus
    let errorCode: UploadApiErrorCode? = nilOrValue(pigeonVar_list[3])
    let httpStatusCode: Int64? = nilOrValue(pigeonVar_list[4])
    return UploadApiTaskStatus(
      id: id,
      filename: filename,
      status: status,
      errorCode: errorCode,
      httpStatusCode: httpStatusCode
    )
  }
  /// Encodes the instance into its positional-list wire form.
  func toList() -> [Any?] {
    return [
      id,
      filename,
      status,
      errorCode,
      httpStatusCode,
    ]
  }
  static func == (lhs: UploadApiTaskStatus, rhs: UploadApiTaskStatus) -> Bool {
    return deepEqualsUploadTask(lhs.toList(), rhs.toList()) }
  func hash(into hasher: inout Hasher) {
    deepHashUploadTask(value: toList(), hasher: &hasher)
  }
}
/// Generated class from Pigeon that represents data sent in messages.
///
/// Progress update for a single task. The positional order in
/// `toList`/`fromList` IS the wire format and must stay in sync with Dart.
struct UploadApiTaskProgress: Hashable {
  var id: String
  var progress: Double
  var speed: Double? = nil
  var totalBytes: Int64? = nil
  // swift-format-ignore: AlwaysUseLowerCamelCase
  /// Decodes an instance from its positional-list wire form.
  static func fromList(_ pigeonVar_list: [Any?]) -> UploadApiTaskProgress? {
    let id = pigeonVar_list[0] as! String
    let progress = pigeonVar_list[1] as! Double
    let speed: Double? = nilOrValue(pigeonVar_list[2])
    let totalBytes: Int64? = nilOrValue(pigeonVar_list[3])
    return UploadApiTaskProgress(
      id: id,
      progress: progress,
      speed: speed,
      totalBytes: totalBytes
    )
  }
  /// Encodes the instance into its positional-list wire form.
  func toList() -> [Any?] {
    return [
      id,
      progress,
      speed,
      totalBytes,
    ]
  }
  static func == (lhs: UploadApiTaskProgress, rhs: UploadApiTaskProgress) -> Bool {
    return deepEqualsUploadTask(lhs.toList(), rhs.toList()) }
  func hash(into hasher: inout Hasher) {
    deepHashUploadTask(value: toList(), hasher: &hasher)
  }
}
/// Decodes the custom Pigeon types tagged by `UploadTaskPigeonCodecWriter`
/// (type bytes 129-132); any other type byte defers to the standard reader.
private class UploadTaskPigeonCodecReader: FlutterStandardReader {
  override func readValue(ofType type: UInt8) -> Any? {
    switch type {
    case 129:
      // Enums travel as raw Int; tolerate a null payload.
      let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?)
      if let enumResultAsInt = enumResultAsInt {
        return UploadApiErrorCode(rawValue: enumResultAsInt)
      }
      return nil
    case 130:
      let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?)
      if let enumResultAsInt = enumResultAsInt {
        return UploadApiStatus(rawValue: enumResultAsInt)
      }
      return nil
    case 131:
      return UploadApiTaskStatus.fromList(self.readValue() as! [Any?])
    case 132:
      return UploadApiTaskProgress.fromList(self.readValue() as! [Any?])
    default:
      return super.readValue(ofType: type)
    }
  }
}
/// Encodes the custom Pigeon types with their type-byte tags (129-132,
/// mirroring `UploadTaskPigeonCodecReader`); everything else falls through
/// to the standard writer.
private class UploadTaskPigeonCodecWriter: FlutterStandardWriter {
  override func writeValue(_ value: Any) {
    switch value {
    case let errorCode as UploadApiErrorCode:
      super.writeByte(129)
      super.writeValue(errorCode.rawValue)
    case let status as UploadApiStatus:
      super.writeByte(130)
      super.writeValue(status.rawValue)
    case let taskStatus as UploadApiTaskStatus:
      super.writeByte(131)
      super.writeValue(taskStatus.toList())
    case let taskProgress as UploadApiTaskProgress:
      super.writeByte(132)
      super.writeValue(taskProgress.toList())
    default:
      super.writeValue(value)
    }
  }
}
/// Pairs the custom reader and writer so the codec round-trips the tagged
/// Pigeon types in both directions.
private class UploadTaskPigeonCodecReaderWriter: FlutterStandardReaderWriter {
  override func reader(with data: Data) -> FlutterStandardReader {
    return UploadTaskPigeonCodecReader(data: data)
  }
  override func writer(with data: NSMutableData) -> FlutterStandardWriter {
    return UploadTaskPigeonCodecWriter(data: data)
  }
}
/// Shared message codec for all UploadApi channels; a fresh reader/writer is
/// created per message, so the shared instance is safe to reuse.
class UploadTaskPigeonCodec: FlutterStandardMessageCodec, @unchecked Sendable {
  static let shared = UploadTaskPigeonCodec(readerWriter: UploadTaskPigeonCodecReaderWriter())
}
var uploadTaskPigeonMethodCodec = FlutterStandardMethodCodec(readerWriter: UploadTaskPigeonCodecReaderWriter());
/// Generated protocol from Pigeon that represents a handler of messages from Flutter.
protocol UploadApi {
  /// One-time startup: reconcile persisted task state with live session tasks.
  func initialize(completion: @escaping (Result<Void, Error>) -> Void)
  /// Re-trigger the periodic backup sweep.
  func refresh(completion: @escaping (Result<Void, Error>) -> Void)
  /// Cancel every in-flight session task.
  func cancelAll(completion: @escaping (Result<Void, Error>) -> Void)
  /// Queue the given local Photos asset ids for download + upload.
  func enqueueAssets(localIds: [String], completion: @escaping (Result<Void, Error>) -> Void)
  /// Queue already-materialized files (by path) for upload.
  func enqueueFiles(paths: [String], completion: @escaping (Result<Void, Error>) -> Void)
}
/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
class UploadApiSetup {
  static var codec: FlutterStandardMessageCodec { UploadTaskPigeonCodec.shared }
  /// Sets up an instance of `UploadApi` to handle messages through the `binaryMessenger`.
  ///
  /// When `api` is nil, every channel's handler is cleared instead.
  static func setUp(binaryMessenger: FlutterBinaryMessenger, api: UploadApi?, messageChannelSuffix: String = "") {
    let channelSuffix = messageChannelSuffix.isEmpty ? "" : ".\(messageChannelSuffix)"

    // Adapts a Result<Void, Error> completion into a Pigeon reply envelope.
    func completer(_ reply: @escaping (Any?) -> Void) -> (Result<Void, Error>) -> Void {
      return { result in
        switch result {
        case .success:
          reply(wrapResult(nil))
        case .failure(let error):
          reply(wrapError(error))
        }
      }
    }

    // Creates the channel for `method` and installs (or clears) its handler.
    func bind(_ method: String, _ handler: ((Any?, @escaping (Any?) -> Void) -> Void)?) {
      let channel = FlutterBasicMessageChannel(
        name: "dev.flutter.pigeon.immich_mobile.UploadApi.\(method)\(channelSuffix)",
        binaryMessenger: binaryMessenger,
        codec: codec
      )
      if let handler = handler {
        channel.setMessageHandler { message, reply in handler(message, reply) }
      } else {
        channel.setMessageHandler(nil)
      }
    }

    if let api = api {
      bind("initialize") { _, reply in api.initialize(completion: completer(reply)) }
      bind("refresh") { _, reply in api.refresh(completion: completer(reply)) }
      bind("cancelAll") { _, reply in api.cancelAll(completion: completer(reply)) }
      bind("enqueueAssets") { message, reply in
        let args = message as! [Any?]
        api.enqueueAssets(localIds: args[0] as! [String], completion: completer(reply))
      }
      bind("enqueueFiles") { message, reply in
        let args = message as! [Any?]
        api.enqueueFiles(paths: args[0] as! [String], completion: completer(reply))
      }
    } else {
      for method in ["initialize", "refresh", "cancelAll", "enqueueAssets", "enqueueFiles"] {
        bind(method, nil)
      }
    }
  }
}
/// Bridges a typed `PigeonEventChannelWrapper` to Flutter's untyped
/// `FlutterStreamHandler` protocol, managing the event-sink lifecycle.
private class PigeonStreamHandler<ReturnType>: NSObject, FlutterStreamHandler {
  private let wrapper: PigeonEventChannelWrapper<ReturnType>
  private var pigeonSink: PigeonEventSink<ReturnType>? = nil
  init(wrapper: PigeonEventChannelWrapper<ReturnType>) {
    self.wrapper = wrapper
  }
  /// Called when Dart starts listening; wraps the raw sink in a typed one
  /// and forwards to the wrapper.
  func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink)
    -> FlutterError?
  {
    pigeonSink = PigeonEventSink<ReturnType>(events)
    wrapper.onListen(withArguments: arguments, sink: pigeonSink!)
    return nil
  }
  /// Called when Dart cancels the subscription; drops the sink.
  func onCancel(withArguments arguments: Any?) -> FlutterError? {
    pigeonSink = nil
    wrapper.onCancel(withArguments: arguments)
    return nil
  }
}
/// Base class for typed event-channel handlers; subclasses override these
/// no-op hooks to react to listen/cancel from the Dart side.
class PigeonEventChannelWrapper<ReturnType> {
  func onListen(withArguments arguments: Any?, sink: PigeonEventSink<ReturnType>) {}
  func onCancel(withArguments arguments: Any?) {}
}
/// Typed wrapper over a raw `FlutterEventSink` for emitting values, errors,
/// and end-of-stream to the Dart side.
class PigeonEventSink<ReturnType> {
  private let sink: FlutterEventSink
  init(_ sink: @escaping FlutterEventSink) {
    self.sink = sink
  }
  /// Emits a value on the stream.
  func success(_ value: ReturnType) {
    sink(value)
  }
  /// Emits an error event on the stream.
  func error(code: String, message: String?, details: Any?) {
    sink(FlutterError(code: code, message: message, details: details))
  }
  /// Closes the Dart-side stream.
  func endOfStream() {
    sink(FlutterEndOfEventStream)
  }
}
/// Event-channel handler for task status updates streamed to Dart.
class StreamStatusStreamHandler: PigeonEventChannelWrapper<UploadApiTaskStatus> {
  /// Registers `streamHandler` on the `streamStatus` event channel; a
  /// non-empty `instanceName` is appended for multi-instance setups.
  static func register(with messenger: FlutterBinaryMessenger,
                       instanceName: String = "",
                       streamHandler: StreamStatusStreamHandler) {
    let baseName = "dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamStatus"
    let channelName = instanceName.isEmpty ? baseName : "\(baseName).\(instanceName)"
    let internalStreamHandler = PigeonStreamHandler<UploadApiTaskStatus>(wrapper: streamHandler)
    FlutterEventChannel(name: channelName, binaryMessenger: messenger, codec: uploadTaskPigeonMethodCodec)
      .setStreamHandler(internalStreamHandler)
  }
}
/// Event-channel handler for task progress updates streamed to Dart.
class StreamProgressStreamHandler: PigeonEventChannelWrapper<UploadApiTaskProgress> {
  /// Registers `streamHandler` on the `streamProgress` event channel; a
  /// non-empty `instanceName` is appended for multi-instance setups.
  static func register(with messenger: FlutterBinaryMessenger,
                       instanceName: String = "",
                       streamHandler: StreamProgressStreamHandler) {
    let baseName = "dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamProgress"
    let channelName = instanceName.isEmpty ? baseName : "\(baseName).\(instanceName)"
    let internalStreamHandler = PigeonStreamHandler<UploadApiTaskProgress>(wrapper: streamHandler)
    FlutterEventChannel(name: channelName, binaryMessenger: messenger, codec: uploadTaskPigeonMethodCodec)
      .setStreamHandler(internalStreamHandler)
  }
}

View File

@@ -1,271 +0,0 @@
import SQLiteData
import StructuredFieldValues
extension FileHandle {
  /// Opens `path` for writing, creating the file if needed (mode 0644) and
  /// truncating any existing contents.
  ///
  /// - Throws: a POSIX-domain `NSError` carrying `errno` when `open` fails.
  /// - Returns: a handle that closes its descriptor on deallocation.
  static func createOrOverwrite(atPath path: String) throws -> FileHandle {
    let descriptor = open(path, O_WRONLY | O_CREAT | O_TRUNC, 0o644)
    if descriptor < 0 {
      throw NSError(domain: NSPOSIXErrorDomain, code: Int(errno))
    }
    return FileHandle(fileDescriptor: descriptor, closeOnDealloc: true)
  }
}
/// Host-side implementation of the Pigeon `UploadApi`.
///
/// Owns the SQLite pool, the download/upload queues, and the two background
/// URLSessions used for asset uploads: one cellular-capable, one WiFi-only.
class UploadApiImpl: ImmichPlugin, UploadApi {
  private let db: DatabasePool
  private let downloadQueue: DownloadQueue
  private let uploadQueue: UploadQueue
  // Guarded by initLock; backup refuses to run until initialize() completes.
  private var isInitialized = false
  private let initLock = NSLock()
  // Guarded by backupLock; at most one backup sweep runs at a time.
  private var backupTask: Task<Void, Never>?
  private let backupLock = NSLock()
  private let cellularSession: URLSession
  private let wifiOnlySession: URLSession
  init(statusListener: StatusEventListener, progressListener: ProgressEventListener) {
    // NOTE(review): `try!` means a missing documents dir or an unopenable DB
    // crashes the plugin at construction — confirm this is intended.
    let dbUrl = try! FileManager.default.url(
      for: .documentDirectory,
      in: .userDomainMask,
      appropriateFor: nil,
      create: true
    ).appendingPathComponent("immich.sqlite")
    self.db = try! DatabasePool(path: dbUrl.path)
    // Both sessions share one delegate; they differ only in cellular access.
    let cellularConfig = URLSessionConfiguration.background(withIdentifier: "\(TaskConfig.sessionId).cellular")
    cellularConfig.allowsCellularAccess = true
    cellularConfig.waitsForConnectivity = true
    let delegate = UploadApiDelegate(db: db, statusListener: statusListener, progressListener: progressListener)
    self.cellularSession = URLSession(configuration: cellularConfig, delegate: delegate, delegateQueue: nil)
    let wifiOnlyConfig = URLSessionConfiguration.background(withIdentifier: "\(TaskConfig.sessionId).wifi")
    wifiOnlyConfig.allowsCellularAccess = false
    wifiOnlyConfig.waitsForConnectivity = true
    self.wifiOnlySession = URLSession(configuration: wifiOnlyConfig, delegate: delegate, delegateQueue: nil)
    self.uploadQueue = UploadQueue(
      db: db,
      cellularSession: cellularSession,
      wifiOnlySession: wifiOnlySession,
      statusListener: statusListener
    )
    self.downloadQueue = DownloadQueue(
      db: db,
      uploadQueue: uploadQueue,
      statusListener: statusListener,
      progressListener: progressListener
    )
    // Break the construction cycle: the delegate needs both queues back.
    delegate.downloadQueue = downloadQueue
    delegate.uploadQueue = uploadQueue
  }
  /// Reconciles DB task state against the live background-session tasks:
  /// rows marked uploadQueued with no matching session task (orphans) are
  /// reset to downloadPending; session tasks with no usable id are cancelled.
  /// Then clears staged originals, flags the instance initialized, and kicks
  /// off a backup sweep.
  func initialize(completion: @escaping (Result<Void, any Error>) -> Void) {
    Task(priority: .high) {
      do {
        // Fetch DB ids and live session tasks concurrently.
        async let dbIds = db.read { conn in
          try UploadTask.select(\.id).where { $0.status.eq(TaskStatus.uploadQueued) }.fetchAll(conn)
        }
        async let cellularTasks = cellularSession.allTasks
        async let wifiTasks = wifiOnlySession.allTasks
        var dbTaskIds = Set(try await dbIds)
        func validateTasks(_ tasks: [URLSessionTask]) {
          for task in tasks {
            if let taskIdStr = task.taskDescription, let taskId = Int64(taskIdStr), task.state != .canceling {
              dbTaskIds.remove(taskId)
            } else {
              task.cancel()
            }
          }
        }
        validateTasks(await cellularTasks)
        validateTasks(await wifiTasks)
        // Whatever remains in dbTaskIds has no live session task: restart it.
        let orphanIds = Array(dbTaskIds)
        try await db.write { conn in
          try UploadTask.update {
            $0.filePath = nil
            $0.status = .downloadPending
          }
          .where { row in row.status.in([TaskStatus.downloadQueued, TaskStatus.uploadPending]) || row.id.in(orphanIds) }
          .execute(conn)
        }
        try? FileManager.default.removeItem(at: TaskConfig.originalsDir)
        initLock.withLock { isInitialized = true }
        startBackup()
        self.completeWhenActive(for: completion, with: .success(()))
      } catch {
        self.completeWhenActive(for: completion, with: .failure(error))
      }
    }
  }
  /// Re-triggers the backup sweep (no-op until `initialize` has run).
  func refresh(completion: @escaping (Result<Void, any Error>) -> Void) {
    Task {
      startBackup()
      self.completeWhenActive(for: completion, with: .success(()))
    }
  }
  /// Starts a backup sweep unless one is already running or the plugin is
  /// not yet initialized. `backupTask` doubles as the "running" flag.
  func startBackup() {
    dPrint("Starting backup task")
    guard (initLock.withLock { isInitialized }) else { return dPrint("Not initialized, skipping backup") }
    backupLock.withLock {
      guard backupTask == nil else { return dPrint("Backup task already running") }
      backupTask = Task {
        await _startBackup()
        // Clear the flag once the sweep finishes (runs after the outer
        // withLock has returned, so the non-recursive lock is safe).
        backupLock.withLock { backupTask = nil }
      }
    }
  }
  /// Cancels every task in both background sessions.
  func cancelAll(completion: @escaping (Result<Void, any Error>) -> Void) {
    Task {
      async let cellularTasks = cellularSession.allTasks
      async let wifiTasks = wifiOnlySession.allTasks
      cancelSessionTasks(await cellularTasks)
      cancelSessionTasks(await wifiTasks)
      self.completeWhenActive(for: completion, with: .success(()))
    }
  }
  /// Queues the given local Photos asset ids for download + upload.
  /// NOTE(review): calls `completion` directly, unlike the other methods'
  /// `completeWhenActive` — confirm the inconsistency is intentional.
  func enqueueAssets(localIds: [String], completion: @escaping (Result<Void, any Error>) -> Void) {
    Task {
      do {
        try await downloadQueue.enqueueAssets(localIds: localIds)
        completion(.success(()))
      } catch {
        completion(.failure(error))
      }
    }
  }
  /// Queues already-materialized files (by path) for upload.
  func enqueueFiles(paths: [String], completion: @escaping (Result<Void, any Error>) -> Void) {
    Task {
      do {
        try await uploadQueue.enqueueFiles(paths: paths)
        completion(.success(()))
      } catch {
        completion(.failure(error))
      }
    }
  }
  private func cancelSessionTasks(_ tasks: [URLSessionTask]) {
    dPrint("Canceling \(tasks.count) tasks")
    for task in tasks {
      task.cancel()
    }
  }
  /// One backup sweep: selects backup candidates that do not yet have an
  /// UploadTask row (capped by available queue slots) and inserts pending
  /// tasks for them, then always restarts download-queue processing.
  private func _startBackup() async {
    defer { downloadQueue.startQueueProcessing() }
    do {
      let candidates = try await db.read { conn in
        return try LocalAsset.getCandidates()
          .where { asset in !UploadTask.where { task in task.localId.eq(asset.id) }.exists() }
          .select { LocalAssetCandidate.Columns(id: $0.id, type: $0.type) }
          .limit { _ in UploadTaskStat.availableSlots }
          .fetchAll(conn)
      }
      guard !candidates.isEmpty else { return dPrint("No candidates for backup") }
      try await db.write { conn in
        // One reusable draft; only localId/priority vary per candidate.
        var draft = UploadTask.Draft(
          attempts: 0,
          createdAt: Date(),
          filePath: nil,
          isLivePhoto: nil,
          lastError: nil,
          livePhotoVideoId: nil,
          localId: "",
          method: .multipart,
          priority: 0.5,
          retryAfter: nil,
          status: .downloadPending,
        )
        for candidate in candidates {
          // Photos sort ahead of videos in the upload queue.
          draft.localId = candidate.id
          draft.priority = candidate.type == .image ? 0.5 : 0.3
          try UploadTask.insert {
            draft
          } onConflict: {
            ($0.localId, $0.livePhotoVideoId)
          }
          .execute(conn)
        }
      }
      dPrint("Backup enqueued \(candidates.count) assets for upload")
    } catch {
      // NOTE(review): uses `print` while the rest of the class uses dPrint.
      print("Backup queue error: \(error)")
    }
  }
}
/// Multipart/form-data encoder for the `POST /assets` upload request.
///
/// All parts share one process-wide random boundary, and the per-field
/// prefixes are precomputed as static Data so that building a request is
/// just appending values. `multipart()` returns the (header, footer) pair;
/// the file bytes are streamed between them by the upload task.
struct AssetData: StructuredFieldValue {
  static let structuredFieldType: StructuredFieldType = .dictionary
  let deviceAssetId: String
  let deviceId: String
  let fileCreatedAt: String
  let fileModifiedAt: String
  let fileName: String
  let isFavorite: Bool
  let livePhotoVideoId: String?
  // Random per-process boundary shared by every upload in this session.
  static let boundary = "Boundary-\(UUID().uuidString)"
  static let deviceAssetIdField = "--\(boundary)\r\nContent-Disposition: form-data; name=\"deviceAssetId\"\r\n\r\n"
    .data(using: .utf8)!
  static let deviceIdField = "\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"deviceId\"\r\n\r\n"
    .data(using: .utf8)!
  static let fileCreatedAtField =
    "\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"fileCreatedAt\"\r\n\r\n"
    .data(using: .utf8)!
  static let fileModifiedAtField =
    "\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"fileModifiedAt\"\r\n\r\n"
    .data(using: .utf8)!
  static let isFavoriteField = "\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"isFavorite\"\r\n\r\n"
    .data(using: .utf8)!
  static let livePhotoVideoIdField =
    "\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"livePhotoVideoId\"\r\n\r\n"
    .data(using: .utf8)!
  static let trueData = "true".data(using: .utf8)!
  static let falseData = "false".data(using: .utf8)!
  // Closing boundary that terminates the multipart body.
  static let footer = "\r\n--\(boundary)--\r\n".data(using: .utf8)!
  static let contentType = "multipart/form-data; boundary=\(boundary)"
  /// Builds the multipart envelope: everything before the file bytes
  /// (metadata fields plus the `assetData` part header) and the closing
  /// footer. `livePhotoVideoId` is only emitted when present.
  func multipart() -> (Data, Data) {
    var header = Data()
    header.append(Self.deviceAssetIdField)
    header.append(deviceAssetId.data(using: .utf8)!)
    header.append(Self.deviceIdField)
    header.append(deviceId.data(using: .utf8)!)
    header.append(Self.fileCreatedAtField)
    header.append(fileCreatedAt.data(using: .utf8)!)
    header.append(Self.fileModifiedAtField)
    header.append(fileModifiedAt.data(using: .utf8)!)
    header.append(Self.isFavoriteField)
    header.append(isFavorite ? Self.trueData : Self.falseData)
    if let livePhotoVideoId {
      header.append(Self.livePhotoVideoIdField)
      header.append(livePhotoVideoId.data(using: .utf8)!)
    }
    header.append(
      "\r\n--\(Self.boundary)\r\nContent-Disposition: form-data; name=\"assetData\"; filename=\"\(fileName)\"\r\nContent-Type: application/octet-stream\r\n\r\n"
        .data(using: .utf8)!
    )
    return (header, Self.footer)
  }
}

View File

@@ -1,35 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>AppGroupId</key>
<string>$(CUSTOM_GROUP_ID)</string>
<key>NSExtension</key>
<dict>
<key>NSExtensionAttributes</key>
<dict>
<key>IntentsSupported</key>
<array>
<string>INSendMessageIntent</string>
</array>
<key>NSExtensionActivationRule</key>
<string>SUBQUERY ( extensionItems, $extensionItem, SUBQUERY ( $extensionItem.attachments,
<dict>
<key>AppGroupId</key>
<string>$(CUSTOM_GROUP_ID)</string>
<key>NSExtension</key>
<dict>
<key>NSExtensionAttributes</key>
<dict>
<key>IntentsSupported</key>
<array>
<string>INSendMessageIntent</string>
</array>
<key>NSExtensionActivationRule</key>
<string>SUBQUERY ( extensionItems, $extensionItem, SUBQUERY ( $extensionItem.attachments,
$attachment, ( ANY $attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.file-url"
|| ANY $attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.image" || ANY
$attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.text" || ANY
$attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.movie" || ANY
$attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.url" ) ).@count &gt; 0
).@count &gt; 0 </string>
<key>PHSupportedMediaTypes</key>
<array>
<string>Video</string>
<string>Image</string>
</array>
</dict>
<key>NSExtensionMainStoryboard</key>
<string>MainInterface</string>
<key>NSExtensionPointIdentifier</key>
<string>com.apple.share-services</string>
</dict>
</dict>
</plist>
<key>PHSupportedMediaTypes</key>
<array>
<string>Video</string>
<string>Image</string>
</array>
</dict>
<key>NSExtensionMainStoryboard</key>
<string>MainInterface</string>
<key>NSExtensionPointIdentifier</key>
<string>com.apple.share-services</string>
</dict>
</dict>
</plist>

View File

@@ -3,6 +3,8 @@
<plist version="1.0">
<dict>
<key>com.apple.security.application-groups</key>
<array/>
<array>
<string>group.app.immich.share</string>
</array>
</dict>
</plist>

View File

@@ -3,6 +3,8 @@
<plist version="1.0">
<dict>
<key>com.apple.security.application-groups</key>
<array/>
<array>
<string>group.app.immich.share</string>
</array>
</dict>
</plist>

View File

@@ -112,7 +112,7 @@ end
workspace: "Runner.xcworkspace",
configuration: configuration,
export_method: "app-store",
xcargs: "-skipMacroValidation CODE_SIGN_IDENTITY='#{CODE_SIGN_IDENTITY}' CODE_SIGN_STYLE=Manual",
xcargs: "CODE_SIGN_IDENTITY='#{CODE_SIGN_IDENTITY}' CODE_SIGN_STYLE=Manual",
export_options: {
provisioningProfiles: {
"#{app_identifier}" => "#{app_identifier} AppStore",
@@ -195,7 +195,7 @@ end
configuration: "Release",
export_method: "app-store",
skip_package_ipa: false,
xcargs: "-skipMacroValidation -allowProvisioningUpdates",
xcargs: "-allowProvisioningUpdates",
export_options: {
method: "app-store",
signingStyle: "automatic",
@@ -210,37 +210,4 @@ end
)
end
desc "iOS Build Only (no TestFlight upload)"
lane :gha_build_only do
  # Use the same build process as production, just skip the upload
  # This ensures PR builds validate the same way as production builds
  # Install provisioning profiles (use development profiles for PR builds)
  install_provisioning_profile(path: "profile_dev.mobileprovision")
  install_provisioning_profile(path: "profile_dev_share.mobileprovision")
  install_provisioning_profile(path: "profile_dev_widget.mobileprovision")
  # Configure code signing for dev bundle IDs
  configure_code_signing(bundle_id_suffix: "development")
  # Build the app (same as gha_testflight_dev but without upload)
  # skip_package_ipa: archive and validate signing without producing an .ipa
  build_app(
    scheme: "Runner",
    workspace: "Runner.xcworkspace",
    configuration: "Release",
    export_method: "app-store",
    skip_package_ipa: true,
    # -skipMacroValidation avoids interactive Swift-macro trust prompts on CI
    xcargs: "-skipMacroValidation CODE_SIGN_IDENTITY='#{CODE_SIGN_IDENTITY}' CODE_SIGN_STYLE=Manual",
    export_options: {
      # One dev profile per target: app, share extension, widget
      provisioningProfiles: {
        "#{BASE_BUNDLE_ID}.development" => "#{BASE_BUNDLE_ID}.development AppStore",
        "#{BASE_BUNDLE_ID}.development.ShareExtension" => "#{BASE_BUNDLE_ID}.development.ShareExtension AppStore",
        "#{BASE_BUNDLE_ID}.development.Widget" => "#{BASE_BUNDLE_ID}.development.Widget AppStore"
      },
      signingStyle: "manual",
      signingCertificate: CODE_SIGN_IDENTITY
    }
  )
end
end

View File

@@ -58,6 +58,3 @@ const int kPhotoTabIndex = 0;
const int kSearchTabIndex = 1;
const int kAlbumTabIndex = 2;
const int kLibraryTabIndex = 3;
// Workaround for SQLite's variable limit (SQLITE_MAX_VARIABLE_NUMBER = 32766)
const int kDriftMaxChunk = 32000;

View File

@@ -7,3 +7,5 @@ enum AssetVisibilityEnum { timeline, hidden, archive, locked }
enum SortUserBy { id }
enum ActionSource { timeline, viewer }
enum UploadErrorType { none, network, client, server, unknown }

View File

@@ -1,11 +1,15 @@
import 'dart:async';
import 'dart:io';
import 'dart:ui';
import 'package:background_downloader/background_downloader.dart';
import 'package:cancellation_token_http/http.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/services/log.service.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/network_capability_extensions.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/logger_db.repository.dart';
@@ -13,13 +17,16 @@ import 'package:immich_mobile/platform/background_worker_api.g.dart';
import 'package:immich_mobile/platform/background_worker_lock_api.g.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/repositories/file_media.repository.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/services/auth.service.dart';
import 'package:immich_mobile/services/localization.service.dart';
import 'package:immich_mobile/services/upload.service.dart';
import 'package:immich_mobile/utils/bootstrap.dart';
import 'package:immich_mobile/utils/debug_print.dart';
import 'package:immich_mobile/utils/http_ssl_options.dart';
@@ -89,10 +96,23 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
loadTranslations(),
workerManagerPatch.init(dynamicSpawning: true),
_ref?.read(authServiceProvider).setOpenApiServiceEndpoint(),
// Initialize the file downloader
FileDownloader().configure(
globalConfig: [
// maxConcurrent: 6, maxConcurrentByHost(server):6, maxConcurrentByGroup: 3
(Config.holdingQueue, (6, 6, 3)),
// On Android, if files are larger than 256MB, run in foreground service
(Config.runInForegroundIfFileLargerThan, 256),
],
),
FileDownloader().trackTasksInGroup(kDownloadGroupLivePhoto, markDownloadedComplete: false),
FileDownloader().trackTasks(),
_ref?.read(fileMediaRepositoryProvider).enableBackgroundAccess(),
].nonNulls,
);
configureFileDownloaderNotifications();
// Notify the host that the background worker service has been initialized and is ready to use
unawaited(_backgroundHostApi.onInitialized());
} catch (error, stack) {
@@ -110,7 +130,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
_logger.warning("Remote sync did not complete successfully, skipping backup");
return;
}
await uploadApi.refresh();
await _handleBackup();
} catch (error, stack) {
_logger.severe("Failed to complete Android background processing", error, stack);
} finally {
@@ -130,7 +150,13 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
_logger.warning("Remote sync did not complete successfully, skipping backup");
return;
}
await uploadApi.refresh();
final backupFuture = _handleBackup();
if (maxSeconds != null) {
await backupFuture.timeout(Duration(seconds: maxSeconds - 1), onTimeout: () {});
} else {
await backupFuture;
}
} catch (error, stack) {
_logger.severe("Failed to complete iOS background upload", error, stack);
} finally {
@@ -188,6 +214,39 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
}
}
/// Runs a single backup pass inside a guarded zone so that stray async
/// errors from the upload pipeline cannot crash the background isolate.
///
/// Skips silently when the service has been cleaned up, backup is disabled,
/// or no user is logged in. On iOS it resumes the drift backup queue; on
/// other platforms it starts an HTTP-client backup, passing whether the
/// current network is unmetered.
Future<void> _handleBackup() async {
  await runZonedGuarded(
    () async {
      if (_isCleanedUp) {
        return;
      }
      if (!_isBackupEnabled) {
        _logger.info("Backup is disabled. Skipping backup routine");
        return;
      }
      final currentUser = _ref?.read(currentUserProvider);
      if (currentUser == null) {
        _logger.warning("No current user found. Skipping backup from background");
        return;
      }
      if (Platform.isIOS) {
        return _ref?.read(driftBackupProvider.notifier).handleBackupResume(currentUser.id);
      }
      // Defaults to an empty capability list when the ref is gone,
      // which reports the network as metered.
      final networkCapabilities = await _ref?.read(connectivityApiProvider).getCapabilities() ?? [];
      return _ref
          ?.read(uploadServiceProvider)
          .startBackupWithHttpClient(currentUser.id, networkCapabilities.isUnmetered, _cancellationToken);
    },
    (error, stack) {
      // Zone-level handler: log and swallow so the worker keeps running.
      dPrint(() => "Error in backup zone $error, $stack");
    },
  );
}
Future<bool> _syncAssets({Duration? hashTimeout}) async {
await _ref?.read(backgroundSyncProvider).syncLocal();
if (_isCleanedUp) {

View File

@@ -2,10 +2,8 @@ import 'package:flutter/services.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:logging/logging.dart';
@@ -15,7 +13,6 @@ class HashService {
final int _batchSize;
final DriftLocalAlbumRepository _localAlbumRepository;
final DriftLocalAssetRepository _localAssetRepository;
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
final NativeSyncApi _nativeSyncApi;
final bool Function()? _cancelChecker;
final _log = Logger('HashService');
@@ -23,13 +20,11 @@ class HashService {
HashService({
required DriftLocalAlbumRepository localAlbumRepository,
required DriftLocalAssetRepository localAssetRepository,
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
required NativeSyncApi nativeSyncApi,
bool Function()? cancelChecker,
int? batchSize,
}) : _localAlbumRepository = localAlbumRepository,
_localAssetRepository = localAssetRepository,
_trashedLocalAssetRepository = trashedLocalAssetRepository,
_cancelChecker = cancelChecker,
_nativeSyncApi = nativeSyncApi,
_batchSize = batchSize ?? kBatchHashFileLimit;
@@ -54,14 +49,6 @@ class HashService {
await _hashAssets(album, assetsToHash);
}
}
if (CurrentPlatform.isAndroid && localAlbums.isNotEmpty) {
final backupAlbumIds = localAlbums.map((e) => e.id);
final trashedToHash = await _trashedLocalAssetRepository.getAssetsToHash(backupAlbumIds);
if (trashedToHash.isNotEmpty) {
final pseudoAlbum = LocalAlbum(id: '-pseudoAlbum', name: 'Trash', updatedAt: DateTime.now());
await _hashAssets(pseudoAlbum, trashedToHash, isTrashed: true);
}
}
} on PlatformException catch (e) {
if (e.code == _kHashCancelledCode) {
_log.warning("Hashing cancelled by platform");
@@ -78,7 +65,7 @@ class HashService {
/// Processes a list of [LocalAsset]s, storing their hash and updating the assets in the DB
/// with hash for those that were successfully hashed. Hashes are looked up in a table
/// [LocalAssetHashEntity] by local id. Only missing entries are newly hashed and added to the DB.
Future<void> _hashAssets(LocalAlbum album, List<LocalAsset> assetsToHash, {bool isTrashed = false}) async {
Future<void> _hashAssets(LocalAlbum album, List<LocalAsset> assetsToHash) async {
final toHash = <String, LocalAsset>{};
for (final asset in assetsToHash) {
@@ -89,16 +76,16 @@ class HashService {
toHash[asset.id] = asset;
if (toHash.length == _batchSize) {
await _processBatch(album, toHash, isTrashed);
await _processBatch(album, toHash);
toHash.clear();
}
}
await _processBatch(album, toHash, isTrashed);
await _processBatch(album, toHash);
}
/// Processes a batch of assets.
Future<void> _processBatch(LocalAlbum album, Map<String, LocalAsset> toHash, bool isTrashed) async {
Future<void> _processBatch(LocalAlbum album, Map<String, LocalAsset> toHash) async {
if (toHash.isEmpty) {
return;
}
@@ -133,10 +120,7 @@ class HashService {
}
_log.fine("Hashed ${hashed.length}/${toHash.length} assets");
if (isTrashed) {
await _trashedLocalAssetRepository.updateHashes(hashed);
} else {
await _localAssetRepository.updateHashes(hashed);
}
await _localAssetRepository.updateHashes(hashed);
}
}

View File

@@ -4,14 +4,9 @@ import 'package:collection/collection.dart';
import 'package:flutter/foundation.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:immich_mobile/utils/datetime_helpers.dart';
import 'package:immich_mobile/utils/diff.dart';
import 'package:logging/logging.dart';
@@ -19,34 +14,15 @@ import 'package:logging/logging.dart';
class LocalSyncService {
final DriftLocalAlbumRepository _localAlbumRepository;
final NativeSyncApi _nativeSyncApi;
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
final LocalFilesManagerRepository _localFilesManager;
final StorageRepository _storageRepository;
final Logger _log = Logger("DeviceSyncService");
LocalSyncService({
required DriftLocalAlbumRepository localAlbumRepository,
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
required LocalFilesManagerRepository localFilesManager,
required StorageRepository storageRepository,
required NativeSyncApi nativeSyncApi,
}) : _localAlbumRepository = localAlbumRepository,
_trashedLocalAssetRepository = trashedLocalAssetRepository,
_localFilesManager = localFilesManager,
_storageRepository = storageRepository,
_nativeSyncApi = nativeSyncApi;
LocalSyncService({required DriftLocalAlbumRepository localAlbumRepository, required NativeSyncApi nativeSyncApi})
: _localAlbumRepository = localAlbumRepository,
_nativeSyncApi = nativeSyncApi;
Future<void> sync({bool full = false}) async {
final Stopwatch stopwatch = Stopwatch()..start();
try {
if (CurrentPlatform.isAndroid && Store.get(StoreKey.manageLocalMediaAndroid, false)) {
final hasPermission = await _localFilesManager.hasManageMediaPermission();
if (hasPermission) {
await _syncTrashedAssets();
} else {
_log.warning("syncTrashedAssets cannot proceed because MANAGE_MEDIA permission is missing");
}
}
if (full || await _nativeSyncApi.shouldFullSync()) {
_log.fine("Full sync request from ${full ? "user" : "native"}");
return await fullSync();
@@ -93,6 +69,7 @@ class LocalSyncService {
await updateAlbum(dbAlbum, album);
}
}
await _nativeSyncApi.checkpointSync();
} catch (e, s) {
_log.severe("Error performing device sync", e, s);
@@ -296,48 +273,6 @@ class LocalSyncService {
/// Whether two albums agree on name, asset count, and last-updated instant.
///
/// Uses [DateTime.isAtSameMomentAs] so timestamps in different zones that
/// denote the same instant still compare equal.
bool _albumsEqual(LocalAlbum a, LocalAlbum b) =>
    a.name == b.name && a.assetCount == b.assetCount && a.updatedAt.isAtSameMomentAs(b.updatedAt);
/// Fetches the platform's current trash snapshot and reconciles it with the
/// local database via [processTrashedAssets].
Future<void> _syncTrashedAssets() async {
  await processTrashedAssets(await _nativeSyncApi.getTrashedAssets());
}
@visibleForTesting
/// Reconciles the platform trash snapshot with local state.
///
/// [trashedAssetMap] maps an album id to the platform assets currently in the
/// OS trash for that album. The routine, in order:
///   1. persists the snapshot,
///   2. restores from trash any assets the server says should come back,
///   3. moves to the OS trash any local assets flagged for trashing.
///
/// Repository state is only updated after the corresponding filesystem
/// operation succeeds, keeping DB and disk in sync.
Future<void> processTrashedAssets(Map<String, List<PlatformAsset>> trashedAssetMap) async {
  if (trashedAssetMap.isEmpty) {
    // NOTE(review): this branch only logs and then falls through — there is
    // no early return, so the restore/trash steps below still run on an
    // empty snapshot. Confirm whether that is intentional (it may be, so
    // pending restores still get applied).
    _log.info("syncTrashedAssets, No trashed assets found");
  }
  // Flatten {albumId: [assets]} into (albumId, asset) tuples.
  final trashedAssets = trashedAssetMap.cast<String, List<Object?>>().entries.expand(
    (entry) => entry.value.cast<PlatformAsset>().toTrashedAssets(entry.key),
  );
  _log.fine("syncTrashedAssets, trashedAssets: ${trashedAssets.map((e) => e.asset.id)}");
  await _trashedLocalAssetRepository.processTrashSnapshot(trashedAssets);
  // Restore assets the server no longer considers deleted.
  final assetsToRestore = await _trashedLocalAssetRepository.getToRestore();
  if (assetsToRestore.isNotEmpty) {
    final restoredIds = await _localFilesManager.restoreAssetsFromTrash(assetsToRestore);
    await _trashedLocalAssetRepository.applyRestoredAssets(restoredIds);
  } else {
    _log.info("syncTrashedAssets, No remote assets found for restoration");
  }
  // Move locally-flagged assets into the OS trash.
  final localAssetsToTrash = await _trashedLocalAssetRepository.getToTrash();
  if (localAssetsToTrash.isNotEmpty) {
    // Resolve each asset to a media URL; assets without a backing platform
    // entity resolve to null and are filtered out via `nonNulls` below.
    final mediaUrls = await Future.wait(
      localAssetsToTrash.values
          .expand((e) => e)
          .map((localAsset) => _storageRepository.getAssetEntityForAsset(localAsset).then((e) => e?.getMediaUrl())),
    );
    _log.info("Moving to trash ${mediaUrls.join(", ")} assets");
    final result = await _localFilesManager.moveToTrash(mediaUrls.nonNulls.toList());
    if (result) {
      // Record the trash state only after the platform confirmed the move.
      await _trashedLocalAssetRepository.trashLocalAsset(localAssetsToTrash);
    }
  } else {
    _log.info("syncTrashedAssets, No assets found in backup-enabled albums for move to trash");
  }
}
}
extension on Iterable<PlatformAlbum> {
@@ -355,26 +290,20 @@ extension on Iterable<PlatformAlbum> {
extension on Iterable<PlatformAsset> {
List<LocalAsset> toLocalAssets() {
return map((e) => e.toLocalAsset()).toList();
}
Iterable<TrashedAsset> toTrashedAssets(String albumId) {
return map((e) => (albumId: albumId, asset: e.toLocalAsset()));
return map(
(e) => LocalAsset(
id: e.id,
name: e.name,
checksum: null,
type: AssetType.values.elementAtOrNull(e.type) ?? AssetType.other,
createdAt: tryFromSecondsSinceEpoch(e.createdAt, isUtc: true) ?? DateTime.timestamp(),
updatedAt: tryFromSecondsSinceEpoch(e.updatedAt, isUtc: true) ?? DateTime.timestamp(),
width: e.width,
height: e.height,
durationInSeconds: e.durationInSeconds,
orientation: e.orientation,
isFavorite: e.isFavorite,
),
).toList();
}
}
extension on PlatformAsset {
  /// Converts a platform-layer asset into the domain [LocalAsset] model.
  ///
  /// Timestamps arrive as seconds since epoch (UTC); unparseable values fall
  /// back to "now". The checksum is left null — it is filled in later by the
  /// hashing pipeline.
  LocalAsset toLocalAsset() => LocalAsset(
    id: id,
    name: name,
    checksum: null,
    // Out-of-range type indices map to `other` rather than throwing.
    type: AssetType.values.elementAtOrNull(type) ?? AssetType.other,
    createdAt: tryFromSecondsSinceEpoch(createdAt, isUtc: true) ?? DateTime.timestamp(),
    // BUG FIX: was derived from `createdAt`, losing the real update time.
    updatedAt: tryFromSecondsSinceEpoch(updatedAt, isUtc: true) ?? DateTime.timestamp(),
    width: width,
    height: height,
    durationInSeconds: durationInSeconds,
    isFavorite: isFavorite,
    orientation: orientation,
  );
}

View File

@@ -1,15 +1,8 @@
import 'dart:async';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_stream.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';
@@ -18,26 +11,14 @@ class SyncStreamService {
final SyncApiRepository _syncApiRepository;
final SyncStreamRepository _syncStreamRepository;
final DriftLocalAssetRepository _localAssetRepository;
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
final LocalFilesManagerRepository _localFilesManager;
final StorageRepository _storageRepository;
final bool Function()? _cancelChecker;
SyncStreamService({
required SyncApiRepository syncApiRepository,
required SyncStreamRepository syncStreamRepository,
required DriftLocalAssetRepository localAssetRepository,
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
required LocalFilesManagerRepository localFilesManager,
required StorageRepository storageRepository,
bool Function()? cancelChecker,
}) : _syncApiRepository = syncApiRepository,
_syncStreamRepository = syncStreamRepository,
_localAssetRepository = localAssetRepository,
_trashedLocalAssetRepository = trashedLocalAssetRepository,
_localFilesManager = localFilesManager,
_storageRepository = storageRepository,
_cancelChecker = cancelChecker;
bool get isCancelled => _cancelChecker?.call() ?? false;
@@ -102,18 +83,7 @@ class SyncStreamService {
case SyncEntityType.partnerDeleteV1:
return _syncStreamRepository.deletePartnerV1(data.cast());
case SyncEntityType.assetV1:
final remoteSyncAssets = data.cast<SyncAssetV1>();
await _syncStreamRepository.updateAssetsV1(remoteSyncAssets);
if (CurrentPlatform.isAndroid && Store.get(StoreKey.manageLocalMediaAndroid, false)) {
final hasPermission = await _localFilesManager.hasManageMediaPermission();
if (hasPermission) {
await _handleRemoteTrashed(remoteSyncAssets.where((e) => e.deletedAt != null).map((e) => e.checksum));
await _applyRemoteRestoreToLocal();
} else {
_logger.warning("sync Trashed Assets cannot proceed because MANAGE_MEDIA permission is missing");
}
}
return;
return _syncStreamRepository.updateAssetsV1(data.cast());
case SyncEntityType.assetDeleteV1:
return _syncStreamRepository.deleteAssetsV1(data.cast());
case SyncEntityType.assetExifV1:
@@ -242,36 +212,4 @@ class SyncStreamService {
_logger.severe("Error processing AssetUploadReadyV1 websocket batch events", error, stackTrace);
}
}
/// Moves local copies of remotely-trashed assets into the platform trash.
///
/// [checksums] identifies the assets deleted on the server; only local assets
/// that live in backup-enabled albums are affected. The trash state is
/// persisted only after the platform confirms the filesystem move, keeping
/// database and disk in sync.
Future<void> _handleRemoteTrashed(Iterable<String> checksums) async {
  // Guard clauses replace the original nested if/else; in an async body a
  // bare `return;` is idiomatic (the old `return Future.value()` was
  // redundant).
  if (checksums.isEmpty) {
    return;
  }
  final localAssetsToTrash = await _localAssetRepository.getAssetsFromBackupAlbums(checksums);
  if (localAssetsToTrash.isEmpty) {
    _logger.info("No assets found in backup-enabled albums for assets: $checksums");
    return;
  }
  // Resolve each asset to a platform media URL; assets without a backing
  // entity resolve to null and are dropped via `nonNulls` below.
  final mediaUrls = await Future.wait(
    localAssetsToTrash.values
        .expand((e) => e)
        .map((localAsset) => _storageRepository.getAssetEntityForAsset(localAsset).then((e) => e?.getMediaUrl())),
  );
  _logger.info("Moving to trash ${mediaUrls.join(", ")} assets");
  final result = await _localFilesManager.moveToTrash(mediaUrls.nonNulls.toList());
  if (result) {
    // Record the trash operation only after the platform confirmed it.
    await _trashedLocalAssetRepository.trashLocalAsset(localAssetsToTrash);
  }
}
/// Restores local assets that the server no longer considers trashed.
///
/// Queries the pending-restore set, asks the platform to bring those files
/// back from the OS trash, then records which restores actually succeeded.
Future<void> _applyRemoteRestoreToLocal() async {
  final assetsToRestore = await _trashedLocalAssetRepository.getToRestore();
  if (assetsToRestore.isEmpty) {
    _logger.info("No remote assets found for restoration");
    return;
  }
  final restoredIds = await _localFilesManager.restoreAssetsFromTrash(assetsToRestore);
  await _trashedLocalAssetRepository.applyRestoredAssets(restoredIds);
}
}

View File

@@ -0,0 +1,18 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
/// Drift table tracking per-asset upload attempts and failures.
///
/// One row per local asset that has been queued for upload. Rows are removed
/// automatically when the referenced asset is deleted (cascading FK).
class LocalAssetUploadEntity extends Table with DriftDefaultsMixin {
  const LocalAssetUploadEntity();

  // FK to the local asset; deleting the asset cascades to this row.
  TextColumn get assetId => text().references(LocalAssetEntity, #id, onDelete: KeyAction.cascade)();

  // How many upload attempts have been made so far.
  IntColumn get numberOfAttempts => integer().withDefault(const Constant(0))();

  // When the most recent attempt happened; defaults to insert time.
  DateTimeColumn get lastAttemptAt => dateTime().withDefault(currentDateAndTime)();

  // Last failure category, stored as the enum index (0 = first enum value).
  IntColumn get errorType => intEnum<UploadErrorType>().withDefault(const Constant(0))();

  // Optional human-readable detail for the last failure.
  TextColumn get errorMessage => text().nullable()();

  @override
  Set<Column> get primaryKey => {assetId};
}

View File

@@ -0,0 +1,783 @@
// dart format width=80
// ignore_for_file: type=lint
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/local_asset_upload_entity.drift.dart'
as i1;
import 'package:immich_mobile/constants/enums.dart' as i2;
import 'package:immich_mobile/infrastructure/entities/local_asset_upload_entity.dart'
as i3;
import 'package:drift/src/runtime/query_builder/query_builder.dart' as i4;
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart'
as i5;
import 'package:drift/internal/modular.dart' as i6;
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Companion builder signature used for inserts: assetId is required, every
// defaulted column is optional.
typedef $$LocalAssetUploadEntityTableCreateCompanionBuilder =
    i1.LocalAssetUploadEntityCompanion Function({
      required String assetId,
      i0.Value<int> numberOfAttempts,
      i0.Value<DateTime> lastAttemptAt,
      i0.Value<i2.UploadErrorType> errorType,
      i0.Value<String?> errorMessage,
    });
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Companion builder signature used for updates: every column is optional.
typedef $$LocalAssetUploadEntityTableUpdateCompanionBuilder =
    i1.LocalAssetUploadEntityCompanion Function({
      i0.Value<String> assetId,
      i0.Value<int> numberOfAttempts,
      i0.Value<DateTime> lastAttemptAt,
      i0.Value<i2.UploadErrorType> errorType,
      i0.Value<String?> errorMessage,
    });
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Resolves the `asset_id` foreign key of a fetched row to a table manager
// scoped to the single referenced `local_asset_entity` row.
final class $$LocalAssetUploadEntityTableReferences
    extends
        i0.BaseReferences<
          i0.GeneratedDatabase,
          i1.$LocalAssetUploadEntityTable,
          i1.LocalAssetUploadEntityData
        > {
  $$LocalAssetUploadEntityTableReferences(
    super.$_db,
    super.$_table,
    super.$_typedResult,
  );

  // Aliased copy of the referenced table, named after the FK join.
  static i5.$LocalAssetEntityTable _assetIdTable(i0.GeneratedDatabase db) =>
      i6.ReadDatabaseContainer(db)
          .resultSet<i5.$LocalAssetEntityTable>('local_asset_entity')
          .createAlias(
            i0.$_aliasNameGenerator(
              i6.ReadDatabaseContainer(db)
                  .resultSet<i1.$LocalAssetUploadEntityTable>(
                    'local_asset_upload_entity',
                  )
                  .assetId,
              i6.ReadDatabaseContainer(
                db,
              ).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity').id,
            ),
          );

  // Manager filtered to the referenced asset; reuses prefetched data when
  // the join was already loaded.
  i5.$$LocalAssetEntityTableProcessedTableManager get assetId {
    final $_column = $_itemColumn<String>('asset_id')!;
    final manager = i5
        .$$LocalAssetEntityTableTableManager(
          $_db,
          i6.ReadDatabaseContainer(
            $_db,
          ).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
        )
        .filter((f) => f.id.sqlEquals($_column));
    final item = $_typedResult.readTableOrNull(_assetIdTable($_db));
    if (item == null) return manager;
    return i0.ProcessedTableManager(
      manager.$state.copyWith(prefetchedData: [item]),
    );
  }
}
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Exposes typed filter builders for every column, plus a composer that
// filters through the `asset_id` FK into `local_asset_entity`.
class $$LocalAssetUploadEntityTableFilterComposer
    extends i0.Composer<i0.GeneratedDatabase, i1.$LocalAssetUploadEntityTable> {
  $$LocalAssetUploadEntityTableFilterComposer({
    required super.$db,
    required super.$table,
    super.joinBuilder,
    super.$addJoinBuilderToRootComposer,
    super.$removeJoinBuilderFromRootComposer,
  });
  i0.ColumnFilters<int> get numberOfAttempts => $composableBuilder(
    column: $table.numberOfAttempts,
    builder: (column) => i0.ColumnFilters(column),
  );
  i0.ColumnFilters<DateTime> get lastAttemptAt => $composableBuilder(
    column: $table.lastAttemptAt,
    builder: (column) => i0.ColumnFilters(column),
  );
  // Converter-aware filters: accepts the enum value or the raw int.
  i0.ColumnWithTypeConverterFilters<i2.UploadErrorType, i2.UploadErrorType, int>
  get errorType => $composableBuilder(
    column: $table.errorType,
    builder: (column) => i0.ColumnWithTypeConverterFilters(column),
  );
  i0.ColumnFilters<String> get errorMessage => $composableBuilder(
    column: $table.errorMessage,
    builder: (column) => i0.ColumnFilters(column),
  );
  // Filter through the FK into the referenced asset's columns.
  i5.$$LocalAssetEntityTableFilterComposer get assetId {
    final i5.$$LocalAssetEntityTableFilterComposer composer = $composerBuilder(
      composer: this,
      getCurrentColumn: (t) => t.assetId,
      referencedTable: i6.ReadDatabaseContainer(
        $db,
      ).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
      getReferencedColumn: (t) => t.id,
      builder:
          (
            joinBuilder, {
            $addJoinBuilderToRootComposer,
            $removeJoinBuilderFromRootComposer,
          }) => i5.$$LocalAssetEntityTableFilterComposer(
            $db: $db,
            $table: i6.ReadDatabaseContainer(
              $db,
            ).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
            $addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
            joinBuilder: joinBuilder,
            $removeJoinBuilderFromRootComposer:
                $removeJoinBuilderFromRootComposer,
          ),
    );
    return composer;
  }
}
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Exposes ordering builders for every column (errorType orders by its raw
// int), plus ordering through the `asset_id` FK.
class $$LocalAssetUploadEntityTableOrderingComposer
    extends i0.Composer<i0.GeneratedDatabase, i1.$LocalAssetUploadEntityTable> {
  $$LocalAssetUploadEntityTableOrderingComposer({
    required super.$db,
    required super.$table,
    super.joinBuilder,
    super.$addJoinBuilderToRootComposer,
    super.$removeJoinBuilderFromRootComposer,
  });
  i0.ColumnOrderings<int> get numberOfAttempts => $composableBuilder(
    column: $table.numberOfAttempts,
    builder: (column) => i0.ColumnOrderings(column),
  );
  i0.ColumnOrderings<DateTime> get lastAttemptAt => $composableBuilder(
    column: $table.lastAttemptAt,
    builder: (column) => i0.ColumnOrderings(column),
  );
  i0.ColumnOrderings<int> get errorType => $composableBuilder(
    column: $table.errorType,
    builder: (column) => i0.ColumnOrderings(column),
  );
  i0.ColumnOrderings<String> get errorMessage => $composableBuilder(
    column: $table.errorMessage,
    builder: (column) => i0.ColumnOrderings(column),
  );
  // Order through the FK into the referenced asset's columns.
  i5.$$LocalAssetEntityTableOrderingComposer get assetId {
    final i5.$$LocalAssetEntityTableOrderingComposer composer =
        $composerBuilder(
          composer: this,
          getCurrentColumn: (t) => t.assetId,
          referencedTable: i6.ReadDatabaseContainer(
            $db,
          ).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
          getReferencedColumn: (t) => t.id,
          builder:
              (
                joinBuilder, {
                $addJoinBuilderToRootComposer,
                $removeJoinBuilderFromRootComposer,
              }) => i5.$$LocalAssetEntityTableOrderingComposer(
                $db: $db,
                $table: i6.ReadDatabaseContainer(
                  $db,
                ).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
                $addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
                joinBuilder: joinBuilder,
                $removeJoinBuilderFromRootComposer:
                    $removeJoinBuilderFromRootComposer,
              ),
        );
    return composer;
  }
}
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Exposes the raw generated columns for use in computed-field expressions,
// plus column access through the `asset_id` FK.
class $$LocalAssetUploadEntityTableAnnotationComposer
    extends i0.Composer<i0.GeneratedDatabase, i1.$LocalAssetUploadEntityTable> {
  $$LocalAssetUploadEntityTableAnnotationComposer({
    required super.$db,
    required super.$table,
    super.joinBuilder,
    super.$addJoinBuilderToRootComposer,
    super.$removeJoinBuilderFromRootComposer,
  });
  i0.GeneratedColumn<int> get numberOfAttempts => $composableBuilder(
    column: $table.numberOfAttempts,
    builder: (column) => column,
  );
  i0.GeneratedColumn<DateTime> get lastAttemptAt => $composableBuilder(
    column: $table.lastAttemptAt,
    builder: (column) => column,
  );
  i0.GeneratedColumnWithTypeConverter<i2.UploadErrorType, int> get errorType =>
      $composableBuilder(column: $table.errorType, builder: (column) => column);
  i0.GeneratedColumn<String> get errorMessage => $composableBuilder(
    column: $table.errorMessage,
    builder: (column) => column,
  );
  // Annotation access through the FK into the referenced asset's columns.
  i5.$$LocalAssetEntityTableAnnotationComposer get assetId {
    final i5.$$LocalAssetEntityTableAnnotationComposer composer =
        $composerBuilder(
          composer: this,
          getCurrentColumn: (t) => t.assetId,
          referencedTable: i6.ReadDatabaseContainer(
            $db,
          ).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
          getReferencedColumn: (t) => t.id,
          builder:
              (
                joinBuilder, {
                $addJoinBuilderToRootComposer,
                $removeJoinBuilderFromRootComposer,
              }) => i5.$$LocalAssetEntityTableAnnotationComposer(
                $db: $db,
                $table: i6.ReadDatabaseContainer(
                  $db,
                ).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
                $addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
                joinBuilder: joinBuilder,
                $removeJoinBuilderFromRootComposer:
                    $removeJoinBuilderFromRootComposer,
              ),
        );
    return composer;
  }
}
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Root manager wiring the composers, companion builders, reference mapper,
// and optional `assetId` join prefetch for this table.
class $$LocalAssetUploadEntityTableTableManager
    extends
        i0.RootTableManager<
          i0.GeneratedDatabase,
          i1.$LocalAssetUploadEntityTable,
          i1.LocalAssetUploadEntityData,
          i1.$$LocalAssetUploadEntityTableFilterComposer,
          i1.$$LocalAssetUploadEntityTableOrderingComposer,
          i1.$$LocalAssetUploadEntityTableAnnotationComposer,
          $$LocalAssetUploadEntityTableCreateCompanionBuilder,
          $$LocalAssetUploadEntityTableUpdateCompanionBuilder,
          (
            i1.LocalAssetUploadEntityData,
            i1.$$LocalAssetUploadEntityTableReferences,
          ),
          i1.LocalAssetUploadEntityData,
          i0.PrefetchHooks Function({bool assetId})
        > {
  $$LocalAssetUploadEntityTableTableManager(
    i0.GeneratedDatabase db,
    i1.$LocalAssetUploadEntityTable table,
  ) : super(
        i0.TableManagerState(
          db: db,
          table: table,
          createFilteringComposer: () =>
              i1.$$LocalAssetUploadEntityTableFilterComposer(
                $db: db,
                $table: table,
              ),
          createOrderingComposer: () =>
              i1.$$LocalAssetUploadEntityTableOrderingComposer(
                $db: db,
                $table: table,
              ),
          createComputedFieldComposer: () =>
              i1.$$LocalAssetUploadEntityTableAnnotationComposer(
                $db: db,
                $table: table,
              ),
          // Update companion: every column is optional (absent = unchanged).
          updateCompanionCallback:
              ({
                i0.Value<String> assetId = const i0.Value.absent(),
                i0.Value<int> numberOfAttempts = const i0.Value.absent(),
                i0.Value<DateTime> lastAttemptAt = const i0.Value.absent(),
                i0.Value<i2.UploadErrorType> errorType =
                    const i0.Value.absent(),
                i0.Value<String?> errorMessage = const i0.Value.absent(),
              }) => i1.LocalAssetUploadEntityCompanion(
                assetId: assetId,
                numberOfAttempts: numberOfAttempts,
                lastAttemptAt: lastAttemptAt,
                errorType: errorType,
                errorMessage: errorMessage,
              ),
          // Insert companion: only assetId is required.
          createCompanionCallback:
              ({
                required String assetId,
                i0.Value<int> numberOfAttempts = const i0.Value.absent(),
                i0.Value<DateTime> lastAttemptAt = const i0.Value.absent(),
                i0.Value<i2.UploadErrorType> errorType =
                    const i0.Value.absent(),
                i0.Value<String?> errorMessage = const i0.Value.absent(),
              }) => i1.LocalAssetUploadEntityCompanion.insert(
                assetId: assetId,
                numberOfAttempts: numberOfAttempts,
                lastAttemptAt: lastAttemptAt,
                errorType: errorType,
                errorMessage: errorMessage,
              ),
          // Pair each row with its references accessor.
          withReferenceMapper: (p0) => p0
              .map(
                (e) => (
                  e.readTable(table),
                  i1.$$LocalAssetUploadEntityTableReferences(db, table, e),
                ),
              )
              .toList(),
          // When `assetId: true`, join the referenced asset so the
          // references accessor can serve it without an extra query.
          prefetchHooksCallback: ({assetId = false}) {
            return i0.PrefetchHooks(
              db: db,
              explicitlyWatchedTables: [],
              addJoins:
                  <
                    T extends i0.TableManagerState<
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic
                    >
                  >(state) {
                    if (assetId) {
                      state =
                          state.withJoin(
                                currentTable: table,
                                currentColumn: table.assetId,
                                referencedTable: i1
                                    .$$LocalAssetUploadEntityTableReferences
                                    ._assetIdTable(db),
                                referencedColumn: i1
                                    .$$LocalAssetUploadEntityTableReferences
                                    ._assetIdTable(db)
                                    .id,
                              )
                              as T;
                    }
                    return state;
                  },
              getPrefetchedDataCallback: (items) async {
                return [];
              },
            );
          },
        ),
      );
}
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Convenience alias for a manager that has already applied filters/ordering.
typedef $$LocalAssetUploadEntityTableProcessedTableManager =
    i0.ProcessedTableManager<
      i0.GeneratedDatabase,
      i1.$LocalAssetUploadEntityTable,
      i1.LocalAssetUploadEntityData,
      i1.$$LocalAssetUploadEntityTableFilterComposer,
      i1.$$LocalAssetUploadEntityTableOrderingComposer,
      i1.$$LocalAssetUploadEntityTableAnnotationComposer,
      $$LocalAssetUploadEntityTableCreateCompanionBuilder,
      $$LocalAssetUploadEntityTableUpdateCompanionBuilder,
      (
        i1.LocalAssetUploadEntityData,
        i1.$$LocalAssetUploadEntityTableReferences,
      ),
      i1.LocalAssetUploadEntityData,
      i0.PrefetchHooks Function({bool assetId})
    >;
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Runtime schema for `local_asset_upload_entity`: column definitions,
// insert validation, and row mapping. Declared WITHOUT ROWID and STRICT.
class $LocalAssetUploadEntityTable extends i3.LocalAssetUploadEntity
    with
        i0.TableInfo<
          $LocalAssetUploadEntityTable,
          i1.LocalAssetUploadEntityData
        > {
  @override
  final i0.GeneratedDatabase attachedDatabase;
  final String? _alias;
  $LocalAssetUploadEntityTable(this.attachedDatabase, [this._alias]);
  static const i0.VerificationMeta _assetIdMeta = const i0.VerificationMeta(
    'assetId',
  );
  // Primary key; cascading FK into local_asset_entity.
  @override
  late final i0.GeneratedColumn<String> assetId = i0.GeneratedColumn<String>(
    'asset_id',
    aliasedName,
    false,
    type: i0.DriftSqlType.string,
    requiredDuringInsert: true,
    defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
      'REFERENCES local_asset_entity (id) ON DELETE CASCADE',
    ),
  );
  static const i0.VerificationMeta _numberOfAttemptsMeta =
      const i0.VerificationMeta('numberOfAttempts');
  @override
  late final i0.GeneratedColumn<int> numberOfAttempts = i0.GeneratedColumn<int>(
    'number_of_attempts',
    aliasedName,
    false,
    type: i0.DriftSqlType.int,
    requiredDuringInsert: false,
    defaultValue: const i4.Constant(0),
  );
  static const i0.VerificationMeta _lastAttemptAtMeta =
      const i0.VerificationMeta('lastAttemptAt');
  @override
  late final i0.GeneratedColumn<DateTime> lastAttemptAt =
      i0.GeneratedColumn<DateTime>(
        'last_attempt_at',
        aliasedName,
        false,
        type: i0.DriftSqlType.dateTime,
        requiredDuringInsert: false,
        defaultValue: i4.currentDateAndTime,
      );
  // Stored as an int; converted to/from UploadErrorType via enum index.
  @override
  late final i0.GeneratedColumnWithTypeConverter<i2.UploadErrorType, int>
  errorType =
      i0.GeneratedColumn<int>(
        'error_type',
        aliasedName,
        false,
        type: i0.DriftSqlType.int,
        requiredDuringInsert: false,
        defaultValue: const i4.Constant(0),
      ).withConverter<i2.UploadErrorType>(
        i1.$LocalAssetUploadEntityTable.$convertererrorType,
      );
  static const i0.VerificationMeta _errorMessageMeta =
      const i0.VerificationMeta('errorMessage');
  @override
  late final i0.GeneratedColumn<String> errorMessage =
      i0.GeneratedColumn<String>(
        'error_message',
        aliasedName,
        true,
        type: i0.DriftSqlType.string,
        requiredDuringInsert: false,
      );
  @override
  List<i0.GeneratedColumn> get $columns => [
    assetId,
    numberOfAttempts,
    lastAttemptAt,
    errorType,
    errorMessage,
  ];
  @override
  String get aliasedName => _alias ?? actualTableName;
  @override
  String get actualTableName => $name;
  static const String $name = 'local_asset_upload_entity';
  // Validates companion values before insert/update; converter-backed
  // errorType is intentionally not validated here (drift convention).
  @override
  i0.VerificationContext validateIntegrity(
    i0.Insertable<i1.LocalAssetUploadEntityData> instance, {
    bool isInserting = false,
  }) {
    final context = i0.VerificationContext();
    final data = instance.toColumns(true);
    if (data.containsKey('asset_id')) {
      context.handle(
        _assetIdMeta,
        assetId.isAcceptableOrUnknown(data['asset_id']!, _assetIdMeta),
      );
    } else if (isInserting) {
      context.missing(_assetIdMeta);
    }
    if (data.containsKey('number_of_attempts')) {
      context.handle(
        _numberOfAttemptsMeta,
        numberOfAttempts.isAcceptableOrUnknown(
          data['number_of_attempts']!,
          _numberOfAttemptsMeta,
        ),
      );
    }
    if (data.containsKey('last_attempt_at')) {
      context.handle(
        _lastAttemptAtMeta,
        lastAttemptAt.isAcceptableOrUnknown(
          data['last_attempt_at']!,
          _lastAttemptAtMeta,
        ),
      );
    }
    if (data.containsKey('error_message')) {
      context.handle(
        _errorMessageMeta,
        errorMessage.isAcceptableOrUnknown(
          data['error_message']!,
          _errorMessageMeta,
        ),
      );
    }
    return context;
  }

  @override
  Set<i0.GeneratedColumn> get $primaryKey => {assetId};
  // Maps a raw query row (optionally table-prefixed) to the data class.
  @override
  i1.LocalAssetUploadEntityData map(
    Map<String, dynamic> data, {
    String? tablePrefix,
  }) {
    final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
    return i1.LocalAssetUploadEntityData(
      assetId: attachedDatabase.typeMapping.read(
        i0.DriftSqlType.string,
        data['${effectivePrefix}asset_id'],
      )!,
      numberOfAttempts: attachedDatabase.typeMapping.read(
        i0.DriftSqlType.int,
        data['${effectivePrefix}number_of_attempts'],
      )!,
      lastAttemptAt: attachedDatabase.typeMapping.read(
        i0.DriftSqlType.dateTime,
        data['${effectivePrefix}last_attempt_at'],
      )!,
      errorType: i1.$LocalAssetUploadEntityTable.$convertererrorType.fromSql(
        attachedDatabase.typeMapping.read(
          i0.DriftSqlType.int,
          data['${effectivePrefix}error_type'],
        )!,
      ),
      errorMessage: attachedDatabase.typeMapping.read(
        i0.DriftSqlType.string,
        data['${effectivePrefix}error_message'],
      ),
    );
  }

  @override
  $LocalAssetUploadEntityTable createAlias(String alias) {
    return $LocalAssetUploadEntityTable(attachedDatabase, alias);
  }

  // errorType <-> int mapping by enum declaration index.
  static i0.JsonTypeConverter2<i2.UploadErrorType, int, int>
  $convertererrorType = const i0.EnumIndexConverter<i2.UploadErrorType>(
    i2.UploadErrorType.values,
  );
  @override
  bool get withoutRowId => true;
  @override
  bool get isStrict => true;
}
// Drift-generated (build_runner) — regenerate rather than hand-edit.
// Immutable row class with SQL/JSON (de)serialization and value semantics.
class LocalAssetUploadEntityData extends i0.DataClass
    implements i0.Insertable<i1.LocalAssetUploadEntityData> {
  final String assetId;
  final int numberOfAttempts;
  final DateTime lastAttemptAt;
  final i2.UploadErrorType errorType;
  final String? errorMessage;
  const LocalAssetUploadEntityData({
    required this.assetId,
    required this.numberOfAttempts,
    required this.lastAttemptAt,
    required this.errorType,
    this.errorMessage,
  });
  // Converts this row into SQL column expressions for insert/update.
  @override
  Map<String, i0.Expression> toColumns(bool nullToAbsent) {
    final map = <String, i0.Expression>{};
    map['asset_id'] = i0.Variable<String>(assetId);
    map['number_of_attempts'] = i0.Variable<int>(numberOfAttempts);
    map['last_attempt_at'] = i0.Variable<DateTime>(lastAttemptAt);
    {
      // Enum is stored through the table's index converter.
      map['error_type'] = i0.Variable<int>(
        i1.$LocalAssetUploadEntityTable.$convertererrorType.toSql(errorType),
      );
    }
    if (!nullToAbsent || errorMessage != null) {
      map['error_message'] = i0.Variable<String>(errorMessage);
    }
    return map;
  }

  factory LocalAssetUploadEntityData.fromJson(
    Map<String, dynamic> json, {
    i0.ValueSerializer? serializer,
  }) {
    serializer ??= i0.driftRuntimeOptions.defaultSerializer;
    return LocalAssetUploadEntityData(
      assetId: serializer.fromJson<String>(json['assetId']),
      numberOfAttempts: serializer.fromJson<int>(json['numberOfAttempts']),
      lastAttemptAt: serializer.fromJson<DateTime>(json['lastAttemptAt']),
      errorType: i1.$LocalAssetUploadEntityTable.$convertererrorType.fromJson(
        serializer.fromJson<int>(json['errorType']),
      ),
      errorMessage: serializer.fromJson<String?>(json['errorMessage']),
    );
  }
  @override
  Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
    serializer ??= i0.driftRuntimeOptions.defaultSerializer;
    return <String, dynamic>{
      'assetId': serializer.toJson<String>(assetId),
      'numberOfAttempts': serializer.toJson<int>(numberOfAttempts),
      'lastAttemptAt': serializer.toJson<DateTime>(lastAttemptAt),
      'errorType': serializer.toJson<int>(
        i1.$LocalAssetUploadEntityTable.$convertererrorType.toJson(errorType),
      ),
      'errorMessage': serializer.toJson<String?>(errorMessage),
    };
  }

  // errorMessage uses Value<String?> so callers can distinguish "set to
  // null" from "leave unchanged".
  i1.LocalAssetUploadEntityData copyWith({
    String? assetId,
    int? numberOfAttempts,
    DateTime? lastAttemptAt,
    i2.UploadErrorType? errorType,
    i0.Value<String?> errorMessage = const i0.Value.absent(),
  }) => i1.LocalAssetUploadEntityData(
    assetId: assetId ?? this.assetId,
    numberOfAttempts: numberOfAttempts ?? this.numberOfAttempts,
    lastAttemptAt: lastAttemptAt ?? this.lastAttemptAt,
    errorType: errorType ?? this.errorType,
    errorMessage: errorMessage.present ? errorMessage.value : this.errorMessage,
  );
  // Applies only the present fields of a companion onto this row.
  LocalAssetUploadEntityData copyWithCompanion(
    i1.LocalAssetUploadEntityCompanion data,
  ) {
    return LocalAssetUploadEntityData(
      assetId: data.assetId.present ? data.assetId.value : this.assetId,
      numberOfAttempts: data.numberOfAttempts.present
          ? data.numberOfAttempts.value
          : this.numberOfAttempts,
      lastAttemptAt: data.lastAttemptAt.present
          ? data.lastAttemptAt.value
          : this.lastAttemptAt,
      errorType: data.errorType.present ? data.errorType.value : this.errorType,
      errorMessage: data.errorMessage.present
          ? data.errorMessage.value
          : this.errorMessage,
    );
  }

  @override
  String toString() {
    return (StringBuffer('LocalAssetUploadEntityData(')
          ..write('assetId: $assetId, ')
          ..write('numberOfAttempts: $numberOfAttempts, ')
          ..write('lastAttemptAt: $lastAttemptAt, ')
          ..write('errorType: $errorType, ')
          ..write('errorMessage: $errorMessage')
          ..write(')'))
        .toString();
  }

  @override
  int get hashCode => Object.hash(
    assetId,
    numberOfAttempts,
    lastAttemptAt,
    errorType,
    errorMessage,
  );
  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      (other is i1.LocalAssetUploadEntityData &&
          other.assetId == this.assetId &&
          other.numberOfAttempts == this.numberOfAttempts &&
          other.lastAttemptAt == this.lastAttemptAt &&
          other.errorType == this.errorType &&
          other.errorMessage == this.errorMessage);
}
/// Companion used to insert into or update rows of the local asset upload
/// table.
///
/// Every column is wrapped in an [i0.Value] so that an "absent" column can be
/// distinguished from a column explicitly set to `null`.
class LocalAssetUploadEntityCompanion
    extends i0.UpdateCompanion<i1.LocalAssetUploadEntityData> {
  final i0.Value<String> assetId;
  final i0.Value<int> numberOfAttempts;
  final i0.Value<DateTime> lastAttemptAt;
  final i0.Value<i2.UploadErrorType> errorType;
  final i0.Value<String?> errorMessage;

  /// Creates a companion where every column defaults to absent (update form).
  const LocalAssetUploadEntityCompanion({
    this.assetId = const i0.Value.absent(),
    this.numberOfAttempts = const i0.Value.absent(),
    this.lastAttemptAt = const i0.Value.absent(),
    this.errorType = const i0.Value.absent(),
    this.errorMessage = const i0.Value.absent(),
  });

  /// Creates a companion for inserts; [assetId] is required and is wrapped in
  /// an [i0.Value] in the initializer list.
  LocalAssetUploadEntityCompanion.insert({
    required String assetId,
    this.numberOfAttempts = const i0.Value.absent(),
    this.lastAttemptAt = const i0.Value.absent(),
    this.errorType = const i0.Value.absent(),
    this.errorMessage = const i0.Value.absent(),
  }) : assetId = i0.Value(assetId);

  /// Builds an insertable from raw SQL expressions keyed by column name.
  ///
  /// Note that [errorType] expects the already-converted `int` expression,
  /// not the [i2.UploadErrorType] enum.
  static i0.Insertable<i1.LocalAssetUploadEntityData> custom({
    i0.Expression<String>? assetId,
    i0.Expression<int>? numberOfAttempts,
    i0.Expression<DateTime>? lastAttemptAt,
    i0.Expression<int>? errorType,
    i0.Expression<String>? errorMessage,
  }) {
    return i0.RawValuesInsertable({
      if (assetId != null) 'asset_id': assetId,
      if (numberOfAttempts != null) 'number_of_attempts': numberOfAttempts,
      if (lastAttemptAt != null) 'last_attempt_at': lastAttemptAt,
      if (errorType != null) 'error_type': errorType,
      if (errorMessage != null) 'error_message': errorMessage,
    });
  }

  /// Returns a new companion with the provided wrapped values replaced.
  i1.LocalAssetUploadEntityCompanion copyWith({
    i0.Value<String>? assetId,
    i0.Value<int>? numberOfAttempts,
    i0.Value<DateTime>? lastAttemptAt,
    i0.Value<i2.UploadErrorType>? errorType,
    i0.Value<String?>? errorMessage,
  }) =>
      i1.LocalAssetUploadEntityCompanion(
        assetId: assetId ?? this.assetId,
        numberOfAttempts: numberOfAttempts ?? this.numberOfAttempts,
        lastAttemptAt: lastAttemptAt ?? this.lastAttemptAt,
        errorType: errorType ?? this.errorType,
        errorMessage: errorMessage ?? this.errorMessage,
      );

  @override
  Map<String, i0.Expression> toColumns(bool nullToAbsent) {
    // Only columns that were explicitly set make it into the generated SQL.
    return <String, i0.Expression>{
      if (assetId.present) 'asset_id': i0.Variable<String>(assetId.value),
      if (numberOfAttempts.present)
        'number_of_attempts': i0.Variable<int>(numberOfAttempts.value),
      if (lastAttemptAt.present)
        'last_attempt_at': i0.Variable<DateTime>(lastAttemptAt.value),
      if (errorType.present)
        'error_type': i0.Variable<int>(
          // Map the enum to its persisted int form via the table converter.
          i1.$LocalAssetUploadEntityTable.$convertererrorType.toSql(
            errorType.value,
          ),
        ),
      if (errorMessage.present)
        'error_message': i0.Variable<String>(errorMessage.value),
    };
  }

  @override
  String toString() =>
      'LocalAssetUploadEntityCompanion('
      'assetId: $assetId, '
      'numberOfAttempts: $numberOfAttempts, '
      'lastAttemptAt: $lastAttemptAt, '
      'errorType: $errorType, '
      'errorMessage: $errorMessage'
      ')';
}

View File

@@ -1,40 +0,0 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/utils/asset.mixin.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
/// Drift table tracking local assets that have been moved to the trash,
/// keyed by asset [id] within a given [albumId].
///
/// Shared asset columns (name, type, timestamps, dimensions, ...) come from
/// [AssetEntityMixin]; table-wide defaults come from [DriftDefaultsMixin].
// Secondary indexes to speed up lookups by checksum and by album.
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_checksum ON trashed_local_asset_entity (checksum)')
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_album ON trashed_local_asset_entity (album_id)')
class TrashedLocalAssetEntity extends Table with DriftDefaultsMixin, AssetEntityMixin {
  const TrashedLocalAssetEntity();
  // Platform-provided local asset identifier.
  TextColumn get id => text()();
  // Album the asset belonged to when it was trashed.
  TextColumn get albumId => text()();
  // Nullable: the checksum may not have been computed before trashing.
  TextColumn get checksum => text().nullable()();
  BoolColumn get isFavorite => boolean().withDefault(const Constant(false))();
  // EXIF-style orientation value; 0 means unrotated.
  IntColumn get orientation => integer().withDefault(const Constant(0))();
  @override
  // Composite key: the same asset id may appear under multiple albums.
  Set<Column> get primaryKey => {id, albumId};
}
/// Maps the generated [TrashedLocalAssetEntityData] row to the domain-level
/// [LocalAsset] model.
extension TrashedLocalAssetEntityDataDomainExtension on TrashedLocalAssetEntityData {
  /// Converts this row into a [LocalAsset].
  ///
  /// NOTE(review): `albumId` is intentionally dropped here — presumably
  /// [LocalAsset] has no album field; confirm against the domain model.
  LocalAsset toLocalAsset() => LocalAsset(
    id: id,
    name: name,
    checksum: checksum,
    type: type,
    createdAt: createdAt,
    updatedAt: updatedAt,
    durationInSeconds: durationInSeconds,
    isFavorite: isFavorite,
    height: height,
    width: width,
    orientation: orientation,
  );
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,23 +0,0 @@
import 'package:drift/drift.dart' hide Index;
/// Drift table persisting the state of background upload tasks.
// NOTE(review): the second index is missing `IF NOT EXISTS`, unlike the
// first — verify this cannot run twice against the same database.
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_upload_tasks_local_id ON upload_task_entity(local_id);')
@TableIndex.sql('CREATE INDEX idx_upload_tasks_asset_data ON upload_task_entity(status, priority DESC, created_at);')
class UploadTaskEntity extends Table {
  const UploadTaskEntity();
  // Surrogate primary key.
  IntColumn get id => integer().autoIncrement()();
  // Number of upload attempts made so far.
  IntColumn get attempts => integer()();
  DateTimeColumn get createdAt => dateTime()();
  // Absolute path of the file to upload.
  TextColumn get filePath => text()();
  BoolColumn get isLivePhoto => boolean().nullable()();
  // Code of the last error, if any; presumably an enum index — confirm.
  IntColumn get lastError => integer().nullable()();
  // Id of the paired video asset for live photos, when applicable.
  TextColumn get livePhotoVideoId => text().nullable()();
  // Platform-local id of the asset being uploaded.
  TextColumn get localId => text()();
  // Upload method discriminator; semantics defined by the uploader — confirm.
  IntColumn get method => integer()();
  // Queue ordering weight (see the status/priority/created_at index above).
  RealColumn get priority => real()();
  // Earliest time a retry should be attempted, if backoff is active.
  DateTimeColumn get retryAfter => dateTime().nullable()();
  IntColumn get status => integer()();
  @override
  // Strict tables enforce declared column types at the SQLite level.
  bool get isStrict => true;
}

Some files were not shown because too many files have changed in this diff Show More