Compare commits

1 Commit

Author: Zack Pollard
SHA1: 61c4b27b94
Message: refactor: rename background service to legacy
Date: 2025-08-06 11:06:15 +01:00
327 changed files with 65826 additions and 41682 deletions

View File

@@ -49,11 +49,10 @@ fix_permissions() {
log "Fixing permissions for ${IMMICH_WORKSPACE}"
run_cmd sudo find "${IMMICH_WORKSPACE}/server/upload" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres/*" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres" -exec chown node {} +
# Change ownership for directories that exist
for dir in "${IMMICH_WORKSPACE}/.vscode" \
"${IMMICH_WORKSPACE}/server/upload" \
"${IMMICH_WORKSPACE}/.pnpm-store" \
"${IMMICH_WORKSPACE}/.github/node_modules" \
"${IMMICH_WORKSPACE}/cli/node_modules" \
"${IMMICH_WORKSPACE}/e2e/node_modules" \
"${IMMICH_WORKSPACE}/open-api/typescript-sdk/node_modules" \

View File

@@ -8,13 +8,21 @@ services:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override
- ..:/workspaces/immich
- cli_node_modules:/workspaces/immich/cli/node_modules
- e2e_node_modules:/workspaces/immich/e2e/node_modules
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/data
- ${UPLOAD_LOCATION:-upload2-devcontainer-volume}${UPLOAD_LOCATION:+/photos/upload}:/data/upload
- /etc/localtime:/etc/localtime:ro
immich-web:
env_file: !reset []
immich-machine-learning:
env_file: !reset []
database:
env_file: !reset []
environment: !override
@@ -25,10 +33,17 @@ services:
POSTGRES_HOST_AUTH_METHOD: md5
volumes:
- ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data
redis:
env_file: !reset []
volumes:
# Node modules for each service to avoid conflicts and ensure consistent dependencies
cli_node_modules:
e2e_node_modules:
open_api_node_modules:
server_node_modules:
web_node_modules:
upload1-devcontainer-volume:
upload2-devcontainer-volume:
postgres-devcontainer-volume:
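
Two details in this devcontainer override are easy to miss. The `!override` and `!reset` YAML tags are Compose merge tags: `!override` replaces the base file's list or map outright instead of merging with it, and `!reset` clears an inherited value. The volume sources combine two bash-style parameter expansions so that a single line works with or without `UPLOAD_LOCATION` set; a quick illustration (values are hypothetical):

# ${VAR:-default} expands to "default" when VAR is unset or empty;
# ${VAR:+suffix} expands to "suffix" only when VAR is set.
unset UPLOAD_LOCATION
echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
# -> upload1-devcontainer-volume   (falls back to the named volume)
UPLOAD_LOCATION=/mnt/media
echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
# -> /mnt/media/photos             (bind-mounts the host path)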

View File

@@ -3,20 +3,15 @@
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Preparing Immich Nest API Server"
log ""
export CI=1
run_cmd pnpm --filter immich install
log "Starting Nest API Server"
log ""
cd "${IMMICH_WORKSPACE}/server" || (
log "Immich workspace not found"jj
log "Immich workspace not found"
exit 1
)
while true; do
run_cmd pnpm --filter immich exec nest start --debug "0.0.0.0:9230" --watch
run_cmd node ./node_modules/.bin/nest start --debug "0.0.0.0:9230" --watch
log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
sleep 3
done

View File

@@ -3,13 +3,6 @@
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
export CI=1
log "Preparing Immich Web Frontend"
log ""
run_cmd pnpm --filter @immich/sdk install
run_cmd pnpm --filter @immich/sdk build
run_cmd pnpm --filter immich-web install
log "Starting Immich Web Frontend"
log ""
cd "${IMMICH_WORKSPACE}/web" || (
@@ -23,7 +16,7 @@ until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_
done
while true; do
run_cmd pnpm --filter immich-web exec vite dev --host 0.0.0.0 --port "${DEV_PORT}"
run_cmd node ./node_modules/.bin/vite dev --host 0.0.0.0 --port "${DEV_PORT}"
log "Web crashed with exit code $?. Respawning in 3s ..."
sleep 3
done

View File

@@ -6,6 +6,9 @@ source /immich-devcontainer/container-common.sh
log "Setting up Immich dev container..."
fix_permissions
log "Installing npm dependencies (node_modules)..."
install_dependencies
log "Setup complete, please wait while backend and frontend services automatically start"
log
log "If necessary, the services may be manually started using"

.github/package-lock.json (generated, vendored, new file; 28 lines)
View File

@@ -0,0 +1,28 @@
{
"name": ".github",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"devDependencies": {
"prettier": "^3.5.3"
}
},
"node_modules/prettier": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz",
"integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==",
"dev": true,
"license": "MIT",
"bin": {
"prettier": "bin/prettier.cjs"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/prettier/prettier?sponsor=1"
}
}
}
}
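
This new lockfile pins Prettier for the `.github` workspace. A lockfile of this shape is what npm produces from the declared dev dependency; roughly (assuming it is run inside `.github/`):

cd .github
npm install --save-dev 'prettier@^3.5.3'   # writes package-lock.json (lockfileVersion 3)
npm ci                                     # later installs reproduce exactly what the lockfile records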

View File

@@ -33,24 +33,21 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Setup typescript-sdk
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk
- run: pnpm install --frozen-lockfile
- run: pnpm build
- run: pnpm publish
- name: Prepare SDK
run: npm ci --prefix ../open-api/typescript-sdk/
- name: Build SDK
run: npm run build --prefix ../open-api/typescript-sdk/
- run: npm ci
- run: npm run build
- run: npm publish
if: ${{ github.event_name == 'release' }}
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
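
The publish step works because `actions/setup-node` with `registry-url` writes an `.npmrc` whose auth token is read from the `NODE_AUTH_TOKEN` environment variable at publish time. A rough manual equivalent (a sketch, not the action's exact file layout):

# approximately what setup-node's registry-url option arranges:
echo '//registry.npmjs.org/:_authToken=${NODE_AUTH_TOKEN}' >> .npmrc
NODE_AUTH_TOKEN="$NPM_TOKEN" npm publish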

View File

@@ -51,7 +51,7 @@ jobs:
run: |
gh api graphql \
-f issueId="$NODE_ID" \
-f body="This issue has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
-f body="This issue has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one." \
-f query='
mutation CommentAndCloseIssue($issueId: ID!, $body: String!) {
addComment(input: {
@@ -77,7 +77,7 @@ jobs:
run: |
gh api graphql \
-f discussionId="$NODE_ID" \
-f body="This discussion has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
-f body="This discussion has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one." \
-f query='
mutation CommentAndCloseDiscussion($discussionId: ID!, $body: String!) {
addDiscussionComment(input: {

View File

@@ -55,24 +55,21 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './docs/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run install
run: pnpm install
- name: Run npm install
run: npm ci
- name: Check formatting
run: pnpm format
run: npm run format
- name: Run build
run: pnpm build
run: npm run build
- name: Upload build output
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2

View File

@@ -32,8 +32,8 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Fix formatting
run: make install-all && make format-all

View File

@@ -20,7 +20,7 @@ jobs:
remove-label:
runs-on: ubuntu-latest
if: ${{ (github.event.action == 'closed' || github.event.pull_request.head.repo.fork) && contains(github.event.pull_request.labels.*.name, 'preview') }}
if: ${{ github.event.action == 'closed' && contains(github.event.pull_request.labels.*.name, 'preview') }}
permissions:
pull-requests: write
steps:
@@ -33,15 +33,3 @@ jobs:
repo: context.repo.repo,
name: 'preview'
})
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
if: ${{ github.event.pull_request.head.repo.fork }}
with:
message-id: 'preview-status'
message: 'PRs from forks cannot have preview environments.'
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
message-id: 'preview-status'
message: 'Preview environment has been removed.'

View File

@@ -20,21 +20,18 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install deps
run: pnpm install --frozen-lockfile
run: npm ci
- name: Build
run: pnpm build
run: npm run build
- name: Publish
run: pnpm publish
run: npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

View File

@@ -4,10 +4,13 @@ on:
pull_request:
push:
branches: [main]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
@@ -29,6 +32,7 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
with:
@@ -54,9 +58,11 @@ jobs:
- '.github/workflows/test.yml'
.github:
- '.github/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
server-unit-tests:
name: Test & Lint Server
needs: pre-job
@@ -67,33 +73,39 @@ jobs:
defaults:
run:
working-directory: ./server
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run package manager install
run: pnpm install
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run npm install
run: npm ci
- name: Run linter
run: pnpm lint
run: npm run lint
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
run: npm run format
if: ${{ !cancelled() }}
- name: Run tsc
run: pnpm check
run: npm run check
if: ${{ !cancelled() }}
- name: Run small tests & coverage
run: pnpm test
run: npm test
if: ${{ !cancelled() }}
cli-unit-tests:
name: Unit Test CLI
needs: pre-job
@@ -104,36 +116,43 @@ jobs:
defaults:
run:
working-directory: ./cli
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Setup typescript-sdk
run: pnpm install && pnpm run build
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: pnpm install
run: npm ci
- name: Run linter
run: pnpm lint
run: npm run lint
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
run: npm run format
if: ${{ !cancelled() }}
- name: Run tsc
run: pnpm check
run: npm run check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: pnpm test
run: npm run test
if: ${{ !cancelled() }}
cli-unit-tests-win:
name: Unit Test CLI (Windows)
needs: pre-job
@@ -144,31 +163,36 @@ jobs:
defaults:
run:
working-directory: ./cli
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: pnpm install --frozen-lockfile
run: npm ci
# Skip linter & formatter in Windows test.
- name: Run tsc
run: pnpm check
run: npm run check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: pnpm test
run: npm run test
if: ${{ !cancelled() }}
web-lint:
name: Lint Web
needs: pre-job
@@ -179,33 +203,39 @@ jobs:
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
- name: Run pnpm install
run: pnpm rebuild && pnpm install --frozen-lockfile
- name: Run npm install
run: npm ci
- name: Run linter
run: pnpm lint:p
run: npm run lint:p
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
run: npm run format
if: ${{ !cancelled() }}
- name: Run svelte checks
run: pnpm check:svelte
run: npm run check:svelte
if: ${{ !cancelled() }}
web-unit-tests:
name: Test Web
needs: pre-job
@@ -216,30 +246,35 @@ jobs:
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
- name: Run npm install
run: pnpm install --frozen-lockfile
run: npm ci
- name: Run tsc
run: pnpm check:typescript
run: npm run check:typescript
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: pnpm test
run: npm run test
if: ${{ !cancelled() }}
i18n-tests:
name: Test i18n
needs: pre-job
@@ -252,24 +287,27 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install dependencies
run: pnpm --filter=immich-web install --frozen-lockfile
run: npm --prefix=web ci
- name: Format
run: pnpm --filter=immich-web format:i18n
run: npm --prefix=web run format:i18n
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
with:
files: |
i18n/**
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
@@ -278,6 +316,7 @@ jobs:
echo "ERROR: i18n files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
exit 1
e2e-tests-lint:
name: End-to-End Lint
needs: pre-job
@@ -288,35 +327,41 @@ jobs:
defaults:
run:
working-directory: ./e2e
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: pnpm install --frozen-lockfile
run: npm ci
if: ${{ !cancelled() }}
- name: Run linter
run: pnpm lint
run: npm run lint
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
run: npm run format
if: ${{ !cancelled() }}
- name: Run tsc
run: pnpm check
run: npm run check
if: ${{ !cancelled() }}
server-medium-tests:
name: Medium Tests (Server)
needs: pre-job
@@ -327,24 +372,27 @@ jobs:
defaults:
run:
working-directory: ./server
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run npm install
run: npm ci
- name: Run medium tests
run: pnpm test:medium
run: npm run test:medium
if: ${{ !cancelled() }}
e2e-tests-server-cli:
name: End-to-End Tests (Server & CLI)
needs: pre-job
@@ -358,41 +406,43 @@ jobs:
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Run setup web
run: pnpm install --frozen-lockfile && pnpm exec svelte-kit sync
working-directory: ./web
if: ${{ !cancelled() }}
- name: Run setup cli
run: pnpm install --frozen-lockfile && pnpm build
run: npm ci && npm run build
working-directory: ./cli
if: ${{ !cancelled() }}
- name: Install dependencies
run: pnpm install --frozen-lockfile
run: npm ci
if: ${{ !cancelled() }}
- name: Docker build
run: docker compose build
if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli)
run: pnpm test
run: npm run test
if: ${{ !cancelled() }}
e2e-tests-web:
name: End-to-End Tests (Web)
needs: pre-job
@@ -406,36 +456,42 @@ jobs:
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: pnpm install --frozen-lockfile
run: npm ci
if: ${{ !cancelled() }}
- name: Install Playwright Browsers
run: npx playwright install chromium --only-shell
if: ${{ !cancelled() }}
- name: Docker build
run: docker compose build
if: ${{ !cancelled() }}
- name: Run e2e tests (web)
run: npx playwright test
if: ${{ !cancelled() }}
success-check-e2e:
name: End-to-End Tests Success
needs: [e2e-tests-server-cli, e2e-tests-web]
@@ -446,6 +502,7 @@ jobs:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}
mobile-unit-tests:
name: Unit Test Mobile
needs: pre-job
@@ -457,19 +514,21 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Generate translation file
run: make translation
working-directory: ./mobile
- name: Run tests
working-directory: ./mobile
run: flutter test -j 1
ml-unit-tests:
name: Unit Test ML
needs: pre-job
@@ -484,6 +543,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
@@ -506,6 +566,7 @@ jobs:
- name: Run tests and coverage
run: |
uv run pytest --cov=immich_ml --cov-report term-missing
github-files-formatting:
name: .github Files Formatting
needs: pre-job
@@ -516,24 +577,27 @@ jobs:
defaults:
run:
working-directory: ./.github
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: pnpm install --frozen-lockfile
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run npm install
run: npm ci
- name: Run formatter
run: pnpm format
run: npm run format
if: ${{ !cancelled() }}
shellcheck:
name: ShellCheck
runs-on: ubuntu-latest
@@ -543,11 +607,15 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Run ShellCheck
uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
with:
ignore_paths: >-
**/open-api/** **/openapi** **/node_modules/**
**/open-api/**
**/openapi**
**/node_modules/**
generated-api-up-to-date:
name: OpenAPI Clients
runs-on: ubuntu-latest
@@ -558,20 +626,23 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich install --frozen-lockfile
run: npm --prefix=server ci
- name: Build the app
run: pnpm --filter immich build
run: npm --prefix=server run build
- name: Run API generation
run: make open-api
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
@@ -580,6 +651,7 @@ jobs:
mobile/openapi
open-api/typescript-sdk
open-api/immich-openapi-specs.json
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
@@ -588,6 +660,7 @@ jobs:
echo "ERROR: Generated files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
exit 1
sql-schema-up-to-date:
name: SQL Schema Checks
runs-on: ubuntu-latest
@@ -601,36 +674,45 @@ jobs:
POSTGRES_USER: postgres
POSTGRES_DB: immich
options: >-
--health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
defaults:
run:
working-directory: ./server
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
run: npm ci
- name: Build the app
run: pnpm build
run: npm run build
- name: Run existing migrations
run: pnpm migrations:run
run: npm run migrations:run
- name: Test npm run schema:reset command works
run: pnpm schema:reset
run: npm run schema:reset
- name: Generate new migrations
continue-on-error: true
run: pnpm migrations:generate src/TestMigration
run: npm run migrations:generate src/TestMigration
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
@@ -646,16 +728,19 @@ jobs:
echo "Changed files: ${CHANGED_FILES}"
cat ./src/*-TestMigration.ts
exit 1
- name: Run SQL generation
run: pnpm sync:sql
run: npm run sync:sql
env:
DB_URL: postgres://postgres:postgres@localhost:5432/immich
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-sql-files
with:
files: |
server/src/queries
- name: Verify SQL files have not changed
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
env:
@@ -666,77 +751,77 @@ jobs:
git diff
exit 1
# mobile-integration-tests:
# name: Run mobile end-to-end integration tests
# runs-on: macos-latest
# steps:
# - uses: actions/checkout@v4
# - uses: actions/setup-java@v3
# with:
# distribution: 'zulu'
# java-version: '12.x'
# cache: 'gradle'
# - name: Cache android SDK
# uses: actions/cache@v3
# id: android-sdk
# with:
# key: android-sdk
# path: |
# /usr/local/lib/android/
# ~/.android
# - name: Cache Gradle
# uses: actions/cache@v3
# with:
# path: |
# ./mobile/build/
# ./mobile/android/.gradle/
# key: ${{ runner.os }}-flutter-${{ hashFiles('**/*.gradle*', 'pubspec.lock') }}
# - name: Setup Android SDK
# if: steps.android-sdk.outputs.cache-hit != 'true'
# uses: android-actions/setup-android@v2
# - name: AVD cache
# uses: actions/cache@v3
# id: avd-cache
# with:
# path: |
# ~/.android/avd/*
# ~/.android/adb*
# key: avd-29
# - name: create AVD and generate snapshot for caching
# if: steps.avd-cache.outputs.cache-hit != 'true'
# uses: reactivecircus/android-emulator-runner@v2.27.0
# with:
# working-directory: ./mobile
# cores: 2
# api-level: 29
# arch: x86_64
# profile: pixel
# target: default
# force-avd-creation: false
# emulator-options: -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
# disable-animations: false
# script: echo "Generated AVD snapshot for caching."
# - name: Setup Flutter SDK
# uses: subosito/flutter-action@v2
# with:
# channel: 'stable'
# flutter-version: '3.7.3'
# cache: true
# - name: Run integration tests
# uses: Wandalen/wretry.action@master
# with:
# action: reactivecircus/android-emulator-runner@v2.27.0
# with: |
# working-directory: ./mobile
# cores: 2
# api-level: 29
# arch: x86_64
# profile: pixel
# target: default
# force-avd-creation: false
# emulator-options: -no-snapshot-save -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
# disable-animations: true
# script: |
# flutter pub get
# flutter test integration_test
# attempt_limit: 3

.gitignore (vendored; 1 line changed)
View File

@@ -5,7 +5,6 @@
!.vscode/launch.json
!.vscode/extensions.json
.idea
**/.pnpm-store/**
docker/upload
docker/library

View File

@@ -1,39 +0,0 @@
module.exports = {
hooks: {
readPackage: (pkg) => {
if (!pkg.name) {
return pkg;
}
switch (pkg.name) {
case "exiftool-vendored":
if (pkg.optionalDependencies["exiftool-vendored.pl"]) {
// make exiftool-vendored.pl a regular dependency
pkg.dependencies["exiftool-vendored.pl"] =
pkg.optionalDependencies["exiftool-vendored.pl"];
delete pkg.optionalDependencies["exiftool-vendored.pl"];
}
break;
case "sharp":
const optionalDeps = Object.keys(pkg.optionalDependencies).filter(
(dep) => dep.startsWith("@img")
);
for (const dep of optionalDeps) {
// remove all optionalDependencies from sharp (they will be compiled from source), except:
// include the precompiled musl version of sharp, for web/Dockerfile
// include precompiled linux-x64 version of sharp, for server/Dockerfile, stage: web-prod
// include precompiled linux-arm64 version of sharp, for server/Dockerfile, stage: web-prod
if (
dep.includes("musl") ||
dep.includes("linux-x64") ||
dep.includes("linux-arm64")
) {
continue;
}
delete pkg.optionalDependencies[dep];
}
break;
}
return pkg;
},
},
};

View File

@@ -56,8 +56,7 @@
"explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": {
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
"*.ts": "${capture}.spec.ts,${capture}.mock.ts",
"package.json": "package-lock.json, yarn.lock, pnpm-lock.yaml, bun.lockb, bun.lock, pnpm-workspace.yaml, .pnpmfile.cjs"
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
},
"svelte.enable-ts-plugin": true,
"typescript.preferences.importModuleSpecifier": "non-relative"

View File

@@ -8,10 +8,7 @@ dev-update:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
dev-scale:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
dev-docs:
npm --prefix docs run start
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
.PHONY: e2e
e2e:
@@ -43,7 +40,7 @@ open-api-typescript:
cd ./open-api && bash ./bin/generate-open-api.sh typescript
sql:
pnpm --filter immich run sync:sql
npm --prefix server run sync:sql
attach-server:
docker exec -it docker_immich-server_1 sh
@@ -53,40 +50,31 @@ renovate:
MODULES = e2e server web cli sdk docs .github
# directory to package name mapping function
# cli = @immich/cli
# docs = documentation
# e2e = immich-e2e
# open-api/typescript-sdk = @immich/sdk
# server = immich
# web = immich-web
map-package = $(subst sdk,@immich/sdk,$(subst cli,@immich/cli,$(subst docs,documentation,$(subst e2e,immich-e2e,$(subst server,immich,$(subst web,immich-web,$1))))))
audit-%:
pnpm --filter $(call map-package,$*) audit fix
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
install-%:
pnpm --filter $(call map-package,$*) install $(if $(FROZEN),--frozen-lockfile) $(if $(OFFLINE),--offline)
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
ci-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) ci
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
pnpm --filter $(call map-package,$*) run build
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build
format-%:
pnpm --filter $(call map-package,$*) run format:fix
npm --prefix $* run format:fix
lint-%:
pnpm --filter $(call map-package,$*) run lint:fix
lint-web:
pnpm --filter $(call map-package,$*) run lint:p
npm --prefix $* run lint:fix
check-%:
pnpm --filter $(call map-package,$*) run check
npm --prefix $* run check
check-web:
pnpm --filter immich-web run check:typescript
pnpm --filter immich-web run check:svelte
npm --prefix web run check:typescript
npm --prefix web run check:svelte
test-%:
pnpm --filter $(call map-package,$*) run test
npm --prefix $* run test
test-e2e:
docker compose -f ./e2e/docker-compose.yml build
pnpm --filter immich-e2e run test
pnpm --filter immich-e2e run test:web
npm --prefix e2e run test
npm --prefix e2e run test:web
test-medium:
docker run \
--rm \
@@ -96,36 +84,25 @@ test-medium:
-v ./server/tsconfig.json:/usr/src/app/tsconfig.json \
-e NODE_ENV=development \
immich-server:latest \
-c "pnpm test:medium -- --run"
-c "npm ci && npm run test:medium -- --run"
test-medium-dev:
docker exec -it immich_server /bin/sh -c "pnpm run test:medium"
docker exec -it immich_server /bin/sh -c "npm run test:medium"
install-all:
pnpm -r --filter '!documentation' install
build-all: $(foreach M,$(filter-out e2e docs .github,$(MODULES)),build-$M) ;
check-all:
pnpm -r --filter '!documentation' run "/^(check|check\:svelte|check\:typescript)$/"
lint-all:
pnpm -r --filter '!documentation' run lint:fix
format-all:
pnpm -r --filter '!documentation' run format:fix
audit-all:
pnpm -r --filter '!documentation' audit fix
hygiene-all: audit-all
pnpm -r --filter '!documentation' run "/(format:fix|check|check:svelte|check:typescript|sql)/"
test-all:
pnpm -r --filter '!documentation' run "/^test/"
build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
ci-all: $(foreach M,$(filter-out .github,$(MODULES)),ci-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
audit-all: $(foreach M,$(MODULES),audit-$M) ;
hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ;
clean:
find . -name "node_modules" -type d -prune -exec rm -rf {} +
find . -name "dist" -type d -prune -exec rm -rf '{}' +
find . -name "build" -type d -prune -exec rm -rf '{}' +
find . -name ".svelte-kit" -type d -prune -exec rm -rf '{}' +
find . -name "coverage" -type d -prune -exec rm -rf '{}' +
find . -name ".pnpm-store" -type d -prune -exec rm -rf '{}' +
find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml rm -v -f || true
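
The rewritten rules swap pnpm's `--filter <package-name>` selectors for npm's `--prefix <directory>` flag, so the `%` stem can be used as a path directly (with `sdk` special-cased to `open-api/typescript-sdk`). For example, the new `check-web` target effectively runs:

npm --prefix web run check:typescript   # same as: cd web && npm run check:typescript
npm --prefix web run check:svelte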

View File

@@ -1,14 +1,19 @@
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
RUN npm ci
COPY open-api/typescript-sdk/ ./
RUN npm run build
WORKDIR /usr/src/app
COPY package* pnpm* .pnpmfile.cjs ./
COPY ./cli ./cli/
COPY ./open-api/typescript-sdk ./open-api/typescript-sdk/
RUN corepack enable pnpm && \
pnpm install --filter @immich/sdk --filter @immich/cli --frozen-lockfile && \
pnpm --filter @immich/sdk build && \
pnpm --filter @immich/cli build
COPY cli/package.json cli/package-lock.json ./
RUN npm ci
COPY cli .
RUN npm run build
WORKDIR /import
ENTRYPOINT ["node", "/usr/src/app/cli/dist"]
ENTRYPOINT ["node", "/usr/src/app/dist"]

View File

@@ -6,10 +6,8 @@ Please see the [Immich CLI documentation](https://immich.app/docs/features/comma
Before building the CLI, you must build the immich server and the open-api client. To build the server run the following in the server folder:
# if you don't have pnpm installed
$ npm install -g pnpm
$ pnpm install
$ pnpm build
$ npm install
$ npm run build
Then, to build the open-api client run the following in the open-api folder:
@@ -17,10 +15,8 @@ Then, to build the open-api client run the following in the open-api folder:
To run the Immich CLI from source, run the following in the cli folder:
# if you don't have pnpm installed
$ npm install -g pnpm
$ pnpm install
$ pnpm build
$ npm install
$ npm run build
$ ts-node .
You'll need ts-node, the easiest way to install it is to use npm:

cli/package-lock.json (generated, new file; 4,600 lines)

File diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.79",
"version": "2.2.77",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",

View File

@@ -21,16 +21,17 @@ services:
# extends:
# file: hwaccel.transcoding.yml
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
user: "${UID:-1000}:${GID:-1000}"
build:
context: ../
dockerfile: server/Dockerfile
target: dev
restart: unless-stopped
volumes:
- ..:/usr/src/app
- ../server:/usr/src/app/server
- ../open-api:/usr/src/app/open-api
- ${UPLOAD_LOCATION}/photos:/data
- ${UPLOAD_LOCATION}/photos/upload:/data/upload
- /usr/src/app/server/node_modules
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
@@ -57,12 +58,8 @@ services:
- 9231:9231
- 2283:2283
depends_on:
redis:
condition: service_started
database:
condition: service_started
init:
condition: service_completed_successfully
- redis
- database
healthcheck:
disable: false
@@ -71,7 +68,6 @@ services:
image: immich-web-dev:latest
# Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
# user: 0:0
user: "${UID:-1000}:${GID:-1000}"
build:
context: ../
dockerfile: web/Dockerfile
@@ -82,17 +78,18 @@ services:
- 3000:3000
- 24678:24678
volumes:
- ..:/usr/src/app
- ../web:/usr/src/app/web
- ../i18n:/usr/src/app/i18n
- ../open-api/:/usr/src/app/open-api/
# - ../../ui:/usr/ui
- /usr/src/app/web/node_modules
ulimits:
nofile:
soft: 1048576
hard: 1048576
restart: unless-stopped
depends_on:
immich-server:
condition: service_started
init:
condition: service_completed_successfully
- immich-server
immich-machine-learning:
container_name: immich_machine_learning
@@ -120,7 +117,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:5b8f8c333bef895c925f56629d6ba90aea95a4f7391f62411e625267c600b19c
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
healthcheck:
test: redis-cli ping || exit 1
@@ -160,14 +157,6 @@ services:
# volumes:
# - grafana-data:/var/lib/grafana
init:
container_name: init
image: busybox
env_file:
- .env
user: 0:0
command: sh -c 'for path in /usr/src/app/.pnpm-store /usr/src/app/server/node_modules /usr/src/app/.github/node_modules /usr/src/app/cli/node_modules /usr/src/app/docs/node_modules /usr/src/app/e2e/node_modules /usr/src/app/open-api/typescript-sdk/node_modules /usr/src/app/web/.svelte-kit /usr/src/app/web/coverage /usr/src/app/node_modules /usr/src/app/web/node_modules; do [ -e "$$path" ] && chown -R ${UID:-1000}:${GID:-1000} "$$path" || true; done'
volumes:
model-cache:
prometheus-data:
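
For reference, the removed `init` helper ran a single `sh -c` loop as root to hand pre-existing store, cache, and node_modules directories back to the host user before the dev services started. Unrolled for readability (abridged to a few of the same paths):

for path in /usr/src/app/.pnpm-store /usr/src/app/server/node_modules /usr/src/app/web/node_modules; do
  [ -e "$path" ] && chown -R "${UID:-1000}:${GID:-1000}" "$path" || true
done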

View File

@@ -56,7 +56,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:5b8f8c333bef895c925f56629d6ba90aea95a4f7391f62411e625267c600b19c
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
healthcheck:
test: redis-cli ping || exit 1
restart: always

View File

@@ -49,7 +49,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:5b8f8c333bef895c925f56629d6ba90aea95a4f7391f62411e625267c600b19c
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
healthcheck:
test: redis-cli ping || exit 1
restart: always
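
Both compose files pin the Valkey image by digest, so these hunks are a routine digest bump for the same `8-bookworm` tag. One way to see which digest a tag currently resolves to (assuming Docker with buildx available):

docker buildx imagetools inspect docker.io/valkey/valkey:8-bookworm   # prints the current manifest digest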

docs/.gitignore (vendored; 4 lines changed)
View File

@@ -18,6 +18,4 @@
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn.lock
/static/openapi.json
yarn.lock

View File

@@ -1,7 +1,2 @@
build/
.docusaurus/
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

View File

@@ -5,7 +5,7 @@ This website is built using [Docusaurus](https://docusaurus.io/), a modern stati
### Installation
```
$ pnpm install
$ npm install
```
### Local Development

View File

@@ -2,6 +2,10 @@
Users can deploy a custom reverse proxy that forwards requests to Immich. This way, the reverse proxy can handle TLS termination, load balancing, or other advanced features. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Real-IP`, `X-Forwarded-Proto` and `X-Forwarded-For` headers to their appropriate values. Additionally, your reverse proxy should allow for big enough uploads. By following these practices, you ensure that all custom reverse proxies are fully compatible with Immich.
:::note
The Repair page can take a long time to load. To avoid server timeouts or errors, we recommend specifying a timeout of at least 10 minutes on your proxy server.
:::
:::caution
Immich does not support being served on a sub-path such as `location /immich {`. It has to be served on the root path of a (sub)domain.
:::
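
As a concrete illustration of those requirements, a minimal nginx sketch; the domain, upstream address, upload limit, and timeout are assumptions drawn from the text above, not an officially supported configuration:

server {
    server_name immich.example.com;            # hypothetical domain
    client_max_body_size 50000M;               # allow large uploads
    location / {                               # root path only; sub-paths are unsupported
        proxy_set_header Host              $host;
        proxy_set_header X-Real-IP         $remote_addr;
        proxy_set_header X-Forwarded-For   $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_read_timeout 600s;               # the Repair page can take a long time
        proxy_pass http://127.0.0.1:2283;      # assumed Immich address
    }
}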

View File

@@ -204,7 +204,7 @@ When the Dev Container starts, it automatically:
1. **Runs post-create script** (`container-server-post-create.sh`):
- Adjusts file permissions for the `node` user
- Installs dependencies: `pnpm install` in all packages
- Installs dependencies: `npm install` in all packages
- Builds TypeScript SDK: `npm run build` in `open-api/typescript-sdk`
2. **Starts development servers** via VS Code tasks:

View File

@@ -56,7 +56,7 @@ If you only want to do web development connected to an existing, remote backend,
1. Build the Immich SDK - `cd open-api/typescript-sdk && npm i && npm run build && cd -`
2. Enter the web directory - `cd web/`
3. Install web dependencies - `pnpm i`
3. Install web dependencies - `npm i`
4. Start the web development server
```bash

View File

@@ -5,7 +5,7 @@
### Unit tests
Unit tests are run by calling `npm run test` from the `server/` directory.
You need to run `pnpm install` (in `server/`) before _once_.
You need to run `npm install` (in `server/`) _once_ beforehand.
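
In other words, the one-time setup plus a test run is:

cd server
npm install    # once
npm run test   # runs the unit tests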
### End to end tests

[Binary image assets: 7 screenshots removed (2.6 KiB–101 KiB), 6 updated with identical sizes (likely moved under img/truenas/), and 6 added (1.6 KiB–19 KiB); previews not shown.]
View File

@@ -2,9 +2,6 @@
sidebar_position: 80
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
# TrueNAS [Community]
:::note
@@ -12,324 +9,211 @@ This is a community contribution and not officially supported by the Immich team
Community support can be found in the dedicated channel on the [Discord Server](https://discord.immich.app/).
**Please report app issues to the corresponding [GitHub Repository](https://github.com/truenas/apps/tree/master/trains/community/immich).**
:::
:::warning
This guide covers the installation of Immich on TrueNAS Community Edition 24.10.2.2 (Electric Eel) and later.
We recommend keeping TrueNAS Community Edition and Immich relatively up to date with the latest versions to avoid any issues.
If you are using an older version of TrueNAS, we ask that you upgrade to the latest version before installing Immich. Check the [TrueNAS Community Edition Release Notes](https://www.truenas.com/docs/softwarereleases/) for more information on breaking changes, new features, and how to upgrade your system.
**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/apps/tree/master/trains/community/immich).**
:::
Immich can easily be installed on TrueNAS Community Edition via the **Community** train application.
Consider reviewing the TrueNAS [Apps resources](https://apps.truenas.com/getting-started/) if you have not previously configured applications on your system.
TrueNAS Community Edition makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries.
## First Steps
The Immich app in TrueNAS Community Edition installs, completes the initial configuration, then starts the Immich web portal.
When updates become available, TrueNAS alerts you and makes updating easy.
Before installing the Immich app in TrueNAS, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation.
You may also configure environment variables at any time after deploying the application.
### Setting up Storage Datasets
Before beginning app installation, [create the datasets](https://www.truenas.com/docs/scale/scaletutorials/storage/datasets/datasetsscale/) to use in the **Storage Configuration** section during installation.
In TrueNAS, Immich requires 2 datasets for the application to function correctly: `data` and `pgData`. You can set the datasets to any names to match your naming conventions or preferences.
You can organize these as one parent with two child datasets, for example `/mnt/tank/immich/data` and `/mnt/tank/immich/pgData`.
Immich requires seven datasets: `library`, `upload`, `thumbs`, `profile`, `video`, `backups`, and `pgData`.
You can organize these as one parent with seven child datasets, for example `/mnt/tank/immich/library`, `/mnt/tank/immich/upload`, and so on.
<img
src={require('./img/truenas/truenas00.webp').default}
width="40%"
src={require('./img/truenas12.webp').default}
width="30%"
alt="Immich App Widget"
className="border rounded-xl"
/>
:::info Datasets Permissions
:::info Permissions
The **pgData** dataset must be owned by the user `netdata` (UID 999) for postgres to start. The other datasets must be owned by the user `root` (UID 0) or a group that includes the user `root` (UID 0) for immich to have the necessary permissions.
The **pgData** dataset must be owned by the user `netdata` (UID 999) for Postgres to start.
The `data` dataset must have given the **_modify_** permission to the user who will run Immich.
Since TrueNAS Community Edition 24.10.2.2, Immich can run as any user and group; the default user is `apps` (UID 568) and the default group is `apps` (GID 568). This user, either `apps` or another user you choose, must have **_modify_** permissions on the **data** dataset.
For an easy setup:
- Create the parent dataset `immich` keeping the default **Generic** preset.
- Select `Dataset Preset` **Apps** instead of **Generic** when creating the `data` dataset. This will automatically give the correct permissions to the dataset. If you want to use another user for Immich, you can keep the **Generic** preset, but you will need to give the **_modify_** permission to that other user.
- For the `pgData` dataset, you can keep the default preset **Generic** as permissions can be set during the installation of the Immich app (See [Storage Configuration](#storage-configuration) section).
:::
:::tip
To improve performance, Immich recommends using SSDs for the database. If you have a pool made of SSDs, you can create the `pgData` dataset on that pool.
Thumbnails can also be stored on the SSDs for faster access. This is an advanced option and not required for Immich to run. More information on how you can use multiple datasets to manage Immich storage in a finer-grained manner can be found in the [Advanced: Multiple Datasets for Immich Storage](#advanced-multiple-datasets-for-immich-storage) section below.
:::
:::warning
If you just created the datasets using the **Apps** preset, you can skip this warning section.
If the **data** dataset uses ACL it must have [ACL mode](https://www.truenas.com/docs/scale/scaletutorials/datasets/permissionsscale/) set to `Passthrough` if you plan on using a [storage template](/docs/administration/storage-template.mdx) and the dataset is configured for network sharing (its ACL type is set to `SMB/NFSv4`). When the template is applied and files need to be moved from **upload** to **library** (internal folder created by Immich within the **data** dataset), Immich performs `chmod` internally and must be allowed to execute the command. [More info.](https://github.com/immich-app/immich/pull/13017)
To change or verify the ACL mode, go to the **Datasets** screen, select the **library** dataset, click on the **Edit** button next to **Dataset Details**, then click on the **Advanced Options** tab, scroll down to the **ACL Mode** section, and select `Passthrough` from the dropdown menu. Click **Save** to apply the changes. If the option is greyed out, set the **ACL Type** to `SMB/NFSv4` first, then you can change the **ACL Mode** to `Passthrough`.
If the **library** dataset uses ACL it must have [ACL mode](https://www.truenas.com/docs/core/coretutorials/storage/pools/permissions/#access-control-lists) set to `Passthrough` if you plan on using a [storage template](/docs/administration/storage-template.mdx) and the dataset is configured for network sharing (its ACL type is set to `SMB/NFSv4`). When the template is applied and files need to be moved from **upload** to **library**, Immich performs `chmod` internally and needs to be allowed to execute the command. [More info.](https://github.com/immich-app/immich/pull/13017)
:::
## Installing the Immich Application
To install the **Immich** application, go to **Apps**, click **Discover Apps**, and either begin typing Immich into the search field or scroll down to locate the **Immich** application widget.
To install the **Immich** application, go to **Apps**, click **Discover Apps**, either begin typing Immich into the search field or scroll down to locate the **Immich** application widget.
<div style={{ marginBottom: '2rem', border: '1px solid #ccc', padding: '1rem', borderRadius: '8px' }}>
Click on the widget to open the **Immich** application details screen.
<img
src={require('./img/truenas/truenas01.webp').default}
src={require('./img/truenas01.webp').default}
width="50%"
alt="Immich App Widget"
className="border rounded-xl"
/>
</div>
Click on the widget to open the **Immich** application details screen.
<div style={{ marginBottom: '2rem', border: '1px solid #ccc', padding: '1rem', borderRadius: '8px' }}>
<br/><br/>
Click **Install** to open the Immich application configuration screen.
<img
src={require('./img/truenas/truenas02.webp').default}
src={require('./img/truenas02.webp').default}
width="100%"
alt="Immich App Details Screen"
className="border rounded-xl"
/>
</div>
Click **Install** to open the Immich application configuration screen.
<br/><br/>
Application configuration settings are presented in several sections, each explained below.
To find specific fields, click in the **Search Input Fields** search field, scroll down to a particular section, or click on the section heading on the navigation area in the upper-right corner.
To find specific fields click in the **Search Input Fields** search field, scroll down to a particular section or click on the section heading on the navigation area in the upper-right corner.
### Application Name and Version
<img
src={require('./img/truenas/truenas03.webp').default}
src={require('./img/truenas03.webp').default}
width="100%"
alt="Install Immich Screen"
className="border rounded-xl mb-4"
className="border rounded-xl"
/>
Keep the default value or enter a name in the **Application Name** field.
Change it if you're deploying a second instance.
Accept the default value or enter a name in **Application Name** field.
In most cases use the default name, but if adding a second deployment of the application you must change this name.
Immich version within the TrueNAS catalog (different from the Immich release version).
Accept the default version number in **Version**.
When a new version becomes available, the application has an update badge.
The **Installed Applications** screen shows the option to update applications.
### Immich Configuration
<img
src={require('./img/truenas/truenas04.webp').default}
src={require('./img/truenas05.webp').default}
width="40%"
alt="Configuration Settings"
className="border rounded-xl mb-4"
/>
The **Timezone** is set to the system default, which usually matches your local timezone. You can change it to another timezone if you prefer.
**Enable Machine Learning** is enabled by default. It allows Immich to use machine learning features such as face recognition, image search, and smart duplicate detection. Untick this option if you do not want to use these features.
Select the **Machine Learning Image Type** based on the hardware you have. More details here: [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md)
**Database Password** should be set to a custom value using only the characters `A-Za-z0-9`. This password is used to secure the Postgres database.
**Redis Password** should be set to a custom value using only the characters `A-Za-z0-9`. Preferably, use a different password from the database password.
Keep the **Log Level** at the default `Log` value.
Leave **Hugging Face Endpoint** blank. (This is used to download ML models from a different source.)
Set **Database Storage Type** to match the storage type (**HDD** or **SSD**) of the pool that holds the **pgData** dataset.
**Additional Environment Variables** can be left blank.
<details>
<summary>Advanced users: Adding Environment Variables</summary>
Environment variables can be set by clicking the **Add** button and filling in the **Name** and **Value** fields.
<img
src={require('./img/truenas/truenas05.webp').default}
width="40%"
alt="Environment Variables"
className="border rounded-xl"
/>
These are used to add custom configuration options or to enable specific features.
More information on available environment variables can be found in the **[environment variables documentation](/docs/install/environment-variables/)**.
:::info
Some environment variables are not available for the TrueNAS Community Edition app as they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
:::
</details>
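If you want quick values for the **Database Password** and **Redis Password** fields above, a shell one-liner that emits only `A-Za-z0-9` characters works; this is a minimal sketch, and any generator restricted to those characters is fine:
```bash
# Print a random 24-character password containing only A-Za-z0-9
tr -dc 'A-Za-z0-9' < /dev/urandom | head -c 24; echo
```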
### User and Group Configuration
Applications in TrueNAS run as a specific user and group. Immich uses the default user `apps` (UID 568) and the default group `apps` (GID 568).
<img
src={require('./img/truenas/truenas06.webp').default}
width="40%"
alt="User and Group Configuration"
className="border rounded-xl"
/>
- **User ID**: Keep the default value `apps` (UID 568) or define a different one if needed.
- **Group ID**: Keep the default value `apps` (GID 568) or define a different one if needed.
:::warning
If you change the user or group, make sure that the datasets you created for Immich data storage have the correct permissions set for that user and group as specified in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above.
:::
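If you do change the user or group, you can verify and, if necessary, fix ownership from the TrueNAS host shell. A minimal sketch, assuming a hypothetical dataset path `/mnt/tank/immich` and a custom UID/GID of `1001`:
```bash
# Inspect current numeric ownership of the Immich datasets
ls -ln /mnt/tank/immich
# Recursively hand the data dataset to the custom user and group
chown -R 1001:1001 /mnt/tank/immich/data
```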
### Network Configuration
<img
src={require('./img/truenas/truenas07.webp').default}
width="40%"
alt="Networking Settings"
className="border rounded-xl"
/>
- **Port Bind Mode**: This lets you expose the port to the host system, allowing you to access Immich from outside the TrueNAS system. Keep the default **_Publish port on the host for external access_** value unless you have a specific reason to change it.
- **Port Number**: Keep the default port `30041` or enter a custom port number.
- **Host IPs**: Leave the default blank value.
:::info Allowed Port Numbers
Only numbers within the range 9000-65535 may be used on TrueNAS versions below TrueNAS Community Edition 24.10 Electric Eel.
Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/solutions/optimizations/security/#truenas-default-ports).
:::
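If you pick a custom port, you can confirm that nothing on the TrueNAS host is already listening on it before installing. A minimal sketch from the host shell (replace `30041` with your chosen port):
```bash
# No output means the port is free; a line of output means it is taken
ss -tln | grep 30041
```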
### Storage Configuration
:::danger Default Settings (Not recommended)
The default setting for datasets is **ixVolume (dataset created automatically by the system)**. This is not recommended: it makes your data harder to access manually, can result in data loss if you delete the Immich app, and makes snapshots and replication tasks harder to manage. Use the **Host Path (Path that already exists on the system)** option instead.
:::
The storage configuration section allows you to set up the storage locations for Immich data. You can select the datasets created in the previous step.
<img
src={require('./img/truenas/truenas08.webp').default}
width="40%"
alt="Configure Storage Volumes"
className="border rounded-xl"
/>
For the Data Storage, select **Host Path (Path that already exists on the system)** and then select the dataset you created for Immich data storage, for example, `data`.
The Machine Learning cache can be left with the default _Temporary_ type.
For the Postgres Data Storage, select **Host Path (Path that already exists on the system)** and then select the dataset you created for Postgres data storage, for example, `pgData`.
:::info
**Postgres Data Storage**
Once **Host Path** is selected, an **_Automatic Permissions_** checkbox appears. If you have not set the ownership of the **pgData** dataset to `netdata` (UID 999), tick this box: it automatically sets the user ownership to `netdata` (UID 999) and the group ownership to `docker` (GID 999). If you have already set the ownership of the **pgData** dataset to `netdata` (UID 999), you can leave this box unticked.
:::
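If you prefer to set the **pgData** ownership manually instead of using **_Automatic Permissions_**, a minimal sketch from the host shell, assuming the dataset lives at the hypothetical path `/mnt/tank/immich/pgData`:
```bash
# Postgres inside the app runs as netdata (UID 999) with group docker (GID 999)
chown -R 999:999 /mnt/tank/immich/pgData
```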
### Additional Storage (Advanced Users)
<details>
<summary>External Libraries</summary>
:::danger Advanced Users Only
This feature should only be used by advanced users. If this is your first time installing Immich, then DO NOT mount an external library until you have a working setup.
:::
<img
src={require('./img/truenas/truenas09.webp').default}
width="40%"
alt="Add External Libraries with Additional Storage"
className="border rounded-xl"
/>
You may configure [external libraries](/docs/features/libraries) by mounting them using **Additional Storage**.
The dataset that contains your external library files must at least give **read** access to the user running Immich (Default: `apps` (UID 568), `apps` (GID 568)).
If you want to be able to delete files or edit metadata in the external library using Immich, you will need to give the **modify** permission to the user running Immich.
- **Mount Path** is the location you will need to copy and paste into the external library settings within Immich.
- **Host Path** is the location on the TrueNAS Community Edition server where your external library is located.
- **Read Only** is a checkbox that you can tick if you want to prevent Immich from modifying the files in the external library. This is useful if you want to use Immich to view and search your external library without modifying it.
:::warning
Each mount path MUST be something unique and should NOT be your library or upload location or a Linux directory like `/lib`.
A general recommendation is to mount any external libraries to a path beginning with `/mnt` or `/media` followed by a unique name, such as `/mnt/external-libraries` or `/media/my-external-libraries`. If you plan to mount multiple external libraries, you can use paths like `/mnt/external-libraries/library1`, `/mnt/external-libraries/library2`, etc.
:::
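As a concrete illustration of the recommendation above, a hypothetical layout with two external libraries could map as follows (all names are examples):
```
Mount Path: /mnt/external-libraries/library1  ->  Host Path: /mnt/tank/photos/family
Mount Path: /mnt/external-libraries/library2  ->  Host Path: /mnt/tank/photos/travel
```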
</details>
<details>
<summary>Multiple Datasets for Immich Storage</summary>
:::danger Advanced Users Only
This feature should only be used by advanced users.
:::
Immich can use multiple datasets for its storage, allowing you to manage your data more granularly, similar to the old storage configuration. This is useful if you want to separate your data into different datasets for performance or organizational reasons. There is a general guide for this [here](/docs/guides/custom-locations), but read on for the TrueNAS guide.
Each additional dataset must grant the **_modify_** permission to the user who will run Immich (default: `apps` (UID 568), `apps` (GID 568)).
As described in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above, you have to create the datasets with the **Apps** preset to ensure the correct permissions are set, or you can set the permissions manually after creating the datasets.
Immich uses 6 folders for its storage: `library`, `upload`, `thumbs`, `profile`, `encoded-video`, and `backups`. You can create a dataset for each of these folders or only for some of them.
To mount these datasets:
1. Add an **Additional Storage** entry for each dataset you want to use.
2. Select **Type** as **Host Path (Path that already exists on the system)**.
3. Enter `/data/<folder-name>` as the **Mount Path**, where `<folder-name>` is the name of the folder you want to mount, for example, `library`, `upload`, `thumbs`, `profile`, `encoded-video`, or `backups`.
:::danger Important
You have to write the full path, including `/data/`, as Immich expects the data to be in that location.
If you do not include this path, Immich will not be able to find the data and will not write the data to the location you specified.
:::
4. Select the **Host Path** as the dataset you created for that folder, for example, `/mnt/tank/immich/library`, `/mnt/tank/immich/upload`, etc.
<img
src={require('./img/truenas/truenas10.webp').default}
width="40%"
alt="Use Multiple Datasets for Immich Storage with Additional Storage"
className="border rounded-xl"
/>
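Putting the steps above together, a complete hypothetical mapping for all six folders on a pool named `tank` could look like this:
```
/data/library        ->  /mnt/tank/immich/library
/data/upload         ->  /mnt/tank/immich/upload
/data/thumbs         ->  /mnt/tank/immich/thumbs
/data/profile        ->  /mnt/tank/immich/profile
/data/encoded-video  ->  /mnt/tank/immich/video
/data/backups        ->  /mnt/tank/immich/backups
```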
</details>
<!-- A section for Labels could be added, but I don't think it is needed as they are of no use for Immich. -->
### Resources Configuration
<img
src={require('./img/truenas/truenas11.webp').default}
width="40%"
alt="Resource Limits"
className="border rounded-xl"
/>
- **CPU**: Depending on your system resources, you can keep the default value of `2` threads or specify a different number. Immich recommends at least `8` threads.
- **Memory**: Limit in MB of RAM. Immich recommends at least 6000 MB (6GB). If you selected **Enable Machine Learning** in **Immich Configuration**, you should probably set this above 8000 MB.
Both **CPU** and **Memory** are limits, not reservations. This means that Immich can use up to the specified amount of CPU threads and RAM, but it will not reserve that amount of resources at all times. The system will allocate resources as needed, and Immich will use less than the specified amount most of the time.
:::info Older TrueNAS Versions
Before TrueNAS Community Edition version 24.10 Electric Eel:
The **CPU** value was specified in a different format with a default of `4000m`, which is 4 threads.
The **Memory** value was specified in a different format with a default of `8Gi`, which is 8 GiB of RAM. The value was specified in bytes or as a number with a measurement suffix. Examples: `129M`, `123Mi`, `1000000000`
:::
- **GPU Configuration**: Enable these options if you have a GPU or CPU with integrated graphics that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). The process for NVIDIA GPU passthrough requires additional steps. More details here: [GPU Passthrough Docs for TrueNAS Apps](https://apps.truenas.com/managing-apps/installing-apps/#gpu-passthrough)
### Install
Finally, click **Install**.
The system opens the **Installed Applications** screen with the Immich app in the **Deploying** state.
When the installation completes, it changes to **Running**.
<img
src={require('./img/truenas/truenas12.webp').default}
width="100%"
alt="Immich Installed"
className="border rounded-xl"
/>
Click **Web Portal** on the **Application Info** widget, or go to the URL `http://<your-truenas-ip>:30041` in your web browser to open the Immich web interface. This will show you the onboarding process to set up your first user account, which will be an administrator account.
After that, you can start using Immich to upload and manage your photos and videos.
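If the portal does not load, a quick reachability check from another machine on the network can rule out port or firewall issues; a sketch, with `192.168.1.100` standing in for your TrueNAS IP:
```bash
# A 200 or 30x response means the Immich web server is reachable
curl -I http://192.168.1.100:30041
```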
:::tip
For more information on how to use the application once installed, please refer to the [Post Install](/docs/install/post-install.mdx) guide.
@@ -344,6 +228,23 @@ For more information on how to use the application once installed, please refer
- Click **Update** at the very bottom of the page to save changes.
- TrueNAS automatically updates, recreates, and redeploys the Immich container with the updated settings.
## Updating the App
:::danger
@@ -360,116 +261,3 @@ To update the app to the latest version:
- You may view the Changelog.
- Click **Upgrade** to begin the process and open a counter dialog that shows the upgrade progress.
- When complete, the update badge and buttons disappear and the application Update state on the Installed screen changes from Update Available to Up to date.
## Migration
:::danger
Perform a backup of your Immich data before proceeding with the migration steps below. This is crucial to prevent any data loss if something goes wrong during the migration process.
The migration should also be performed when the Immich app is not running to ensure no data is being written while you are copying the data.
:::
### Migration from Old Storage Configuration
There are two ways to migrate from the old storage configuration to the new one, depending on whether you want to keep the old multiple datasets or move to a two-dataset configuration, with a single dataset for Immich data storage and a single dataset for Postgres data storage.
:::note Old TrueNAS Versions Permissions
If you were using an older version of TrueNAS (before 24.10.2.2), the datasets, except the one for **pgData**, only had to be owned by the `root` user (UID 0). You might have to add the **modify** permission for the `apps` user (UID 568), or whichever user you want to run Immich as, to all of them except **pgData**. The steps to add or change ACL permissions are described in the [TrueNAS documentation](https://www.truenas.com/docs/scale/scaletutorials/datasets/permissionsscale/).
:::
<Tabs groupId="truenas-migration-tabs">
<TabItem value="migrate-new-dataset" label="Migrate data to a new dataset (recommended)" default>
To migrate from the old storage configuration to the new one, you will need to create a new dataset for the Immich data storage and copy the data from the old datasets to the new ones. The steps are as follows:
1. **Stop the Immich app** from the TrueNAS web interface to ensure no data is being written while you are copying the data.
2. **Create a new dataset** for the Immich data storage, for example, `data`. As described in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above, create the dataset with the **Apps** preset to ensure the correct permissions are set.
3. **Copy the data** from the old datasets to the new dataset. We advise using the `rsync` command to copy the data, as it will preserve the permissions and ownership of the files. The following commands are examples:
```bash
rsync -av /mnt/tank/immich/library/ /mnt/tank/immich/data/library/
rsync -av /mnt/tank/immich/upload/ /mnt/tank/immich/data/upload/
rsync -av /mnt/tank/immich/thumbs/ /mnt/tank/immich/data/thumbs/
rsync -av /mnt/tank/immich/profile/ /mnt/tank/immich/data/profile/
rsync -av /mnt/tank/immich/video/ /mnt/tank/immich/data/encoded-video/
rsync -av /mnt/tank/immich/backups/ /mnt/tank/immich/data/backups/
```
Make sure to replace `/mnt/tank/immich/` with the correct path to your old datasets and `/mnt/tank/immich/data/` with the correct path to your new dataset.
:::tip
If you were using **ixVolume (dataset created automatically by the system)** for Immich data storage, the path to the data should be `/mnt/.ix-apps/app_mounts/immich/`. You have to use this path instead of `/mnt/tank/immich/` in the `rsync` command above, for example:
```bash
rsync -av /mnt/.ix-apps/app_mounts/immich/library/ /mnt/tank/immich/data/library/
```
If you were also using an ixVolume for Postgres data storage, you should first create the pgData dataset, as described in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above, and then use the following command to copy the Postgres data:
```bash
rsync -av /mnt/.ix-apps/app_mounts/immich/pgData/ /mnt/tank/immich/pgData/
```
:::
:::warning
Make sure that for each folder, the `.immich` file is copied as well, as it contains important metadata for Immich. If for some reason the `.immich` file is not copied, you can copy it manually with the `rsync` command, for example:
```bash
rsync -av /mnt/tank/immich/library/.immich /mnt/tank/immich/data/library/
```
Replace `library` with the name of the folder where you are copying the file.
:::
4. **Update the permissions**: the permissions of the copied data were preserved, so make sure the `apps` user (UID 568) has the correct permissions on everything that was copied. If you just created the dataset with the **Apps** preset, go to the **Datasets** screen in the TrueNAS web interface, select the **data** dataset, click the **Edit** button next to **Permissions**, tick the "Apply permissions recursively" checkbox, and click **Save**. This applies the correct permissions to all the copied data.
5. **Update the Immich app** to use the new dataset:
- Go to the **Installed Applications** screen and select Immich from the list of installed applications.
- Click **Edit** on the **Application Info** widget.
- In the **Storage Configuration** section, untick the **Use Old Storage Configuration (Deprecated)** checkbox.
- For the **Data Storage**, select **Host Path (Path that already exists on the system)** and then select the new dataset you created for Immich data storage, for example, `data`.
- For the **Postgres Data Storage**, verify that it is still set to the dataset you created for Postgres data storage, for example, `pgData`.
- Click **Update** at the bottom of the page to save changes.
6. **Start the Immich app** from the TrueNAS web interface.
This will recreate the Immich container with the new storage configuration and start the app.
If everything went well, you should now be able to access Immich with the new storage configuration. You can verify that the data has been copied correctly by checking the Immich web interface and ensuring that all your photos and videos are still available. You may delete the old datasets, if you no longer need them, using the TrueNAS web interface.
If you were using **ixVolume (dataset created automatically by the system)** or folders for Immich data storage, you can delete the old datasets using the following commands:
```bash
rm -r /mnt/.ix-apps/app_mounts/immich/library
rm -r /mnt/.ix-apps/app_mounts/immich/upload
rm -r /mnt/.ix-apps/app_mounts/immich/thumbs
rm -r /mnt/.ix-apps/app_mounts/immich/profile
rm -r /mnt/.ix-apps/app_mounts/immich/video
rm -r /mnt/.ix-apps/app_mounts/immich/backups
```
</TabItem>
<TabItem value="migrate-old-dataset" label="Keep the existing datasets">
To migrate from the old storage configuration to the new one without creating new datasets, follow these steps:
1. **Stop the Immich app** from the TrueNAS web interface to ensure no data is being written while you are updating the app.
2. **Update the datasets permissions**: Ensure that the datasets used for Immich data storage (`library`, `upload`, `thumbs`, `profile`, `video`, `backups`) have the correct permissions set for the user who will run Immich. The user should have ***modify*** permissions on these datasets. The default user for Immich is `apps` (UID 568) and the default group is `apps` (GID 568). If you are using a different user, make sure to set the permissions accordingly. You can do this from the TrueNAS web interface by going to the **Datasets** screen, selecting each dataset, clicking on the **Edit** button next to **Permissions**, and adding the user with ***modify*** permissions.
3. **Update the Immich app** to use the existing datasets:
- Go to the **Installed Applications** screen and select Immich from the list of installed applications.
- Click **Edit** on the **Application Info** widget.
- In the **Storage Configuration** section, untick the **Use Old Storage Configuration (Deprecated)** checkbox.
- For the **Data Storage**, you can keep the **ixVolume (dataset created automatically by the system)** as no data will be directly written to it. We recommend selecting **Host Path (Path that already exists on the system)** and then selecting a **new** dataset you created for Immich data storage, for example, `data`.
- For the **Postgres Data Storage**, keep **Host Path (Path that already exists on the system)** and then select the existing dataset you used for Postgres data storage, for example, `pgData`.
- Following the instructions in the [Multiple Datasets for Immich Storage](#additional-storage-advanced-users) section, you can add, **for each old dataset**, a new Additional Storage with the following settings:
- **Type**: `Host Path (Path that already exists on the system)`
- **Mount Path**: `/data/<folder-name>` (e.g. `/data/library`)
- **Host Path**: `/mnt/<your-pool-name>/<dataset-name>` (e.g. `/mnt/tank/immich/library`)
:::danger Ensure you use the correct path names
Make sure to replace `<folder-name>` with the actual name of the folder used by Immich: `library`, `upload`, `thumbs`, `profile`, `encoded-video`, and `backups`. Also, replace `<your-pool-name>` and `<dataset-name>` with the actual names of your pool and dataset.
:::
- **Read Only**: Keep it unticked as Immich needs to write to these datasets.
- Click **Update** at the bottom of the page to save changes.
4. **Start the Immich app** from the TrueNAS web interface. This will recreate the Immich container with the new storage configuration and start the app. If everything went well, you should now be able to access Immich with the new storage configuration. You can verify that the data is still available by checking the Immich web interface and ensuring that all your photos and videos are still accessible.
</TabItem>
</Tabs>

View File

@@ -27,102 +27,3 @@ docker image prune
[watchtower]: https://containrrr.dev/watchtower/
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created
[releases]: https://github.com/immich-app/immich/releases
## Migrating to VectorChord
:::info
If you deploy Immich using Docker Compose, your `docker-compose.yml` file references `ghcr.io/immich-app/postgres`, and you have not explicitly set the `DB_VECTOR_EXTENSION` environment variable, then your Immich database is already using VectorChord and this section does not apply to you.
:::
:::important
If you do not deploy Immich using Docker Compose and see a deprecation warning for pgvecto.rs on server startup, you should refer to the maintainers of the Immich distribution for guidance (if using a turnkey solution) or adapt the instructions for your specific setup.
:::
Immich has migrated off the deprecated pgvecto.rs database extension to its successor, [VectorChord](https://github.com/tensorchord/VectorChord), which brings performance improvements in almost every aspect. This section guides you through making this change in a Docker Compose setup.
Before making any changes, please [back up your database](/docs/administration/backup-and-restore). While every effort has been made to make this migration as smooth as possible, theres always a chance that something can go wrong.
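For a typical Docker Compose setup, one way to take that backup is to dump the database from the running container; a sketch, assuming the default container name `immich_postgres` and the `postgres` superuser:
```bash
# Dump the entire cluster to a compressed file before changing the image
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres \
  | gzip > ./immich-db-backup.sql.gz
```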
After making a backup, please modify your `docker-compose.yml` file with the following information.
```diff
[...]
database:
container_name: immich_postgres
- image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
+ image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME}
POSTGRES_INITDB_ARGS: '--data-checksums'
+ # Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
+ # DB_STORAGE_TYPE: 'HDD'
volumes:
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
- healthcheck:
- test: >-
- pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
- Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
- --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
- echo "checksum failure count is $$Chksum";
- [ "$$Chksum" = '0' ] || exit 1
- interval: 5m
- start_interval: 30s
- start_period: 5m
- command: >-
- postgres
- -c shared_preload_libraries=vectors.so
- -c 'search_path="$$user", public, vectors'
- -c logging_collector=on
- -c max_wal_size=2GB
- -c shared_buffers=512MB
- -c wal_compression=on
+ shm_size: 128mb
restart: always
[...]
```
:::important
If you deviated from the defaults of pg14 or pgvectors0.2.0, you must adjust the Postgres major version and pgvecto.rs version accordingly. If you are still using the default `docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0` image, you can simply follow the changes above. For example, if the previous image was `docker.io/tensorchord/pgvecto-rs:pg16-v0.3.0`, the new image should be `ghcr.io/immich-app/postgres:16-vectorchord0.3.0-pgvectors0.3.0` instead of the image specified in the diff.
:::
After making these changes, you can start Immich as normal. Immich will make some changes to the DB during startup, which can take seconds to minutes to finish, depending on hardware and library size. In particular, it's normal for the server logs to be seemingly stuck at `Reindexing clip_index` and `Reindexing face_index` for some time if you have over 100k assets in Immich and/or Immich is on a relatively weak server. If you see these logs and there are no errors, just give it time.
:::danger
After switching to VectorChord, you should not downgrade Immich below 1.133.0.
:::
Please dont hesitate to contact us on [GitHub](https://github.com/immich-app/immich/discussions) or [Discord](https://discord.immich.app/) if you encounter migration issues.
### VectorChord FAQ
#### I have a separate PostgreSQL instance shared with multiple services. How can I switch to VectorChord?
Please see the [standalone PostgreSQL documentation](/docs/administration/postgres-standalone#migrating-to-vectorchord) for migration instructions. The migration path will be different depending on whether youre currently using pgvecto.rs or pgvector, as well as whether Immich has superuser DB permissions.
#### Why are so many lines removed from the `docker-compose.yml` file? Does this mean the health check is removed?
These lines are now incorporated into the image itself along with some additional tuning.
#### What does this change mean for my existing DB backups?
The new DB image includes pgvector and pgvecto.rs in addition to VectorChord, so you can use this image to restore from existing backups that used either of these extensions. However, backups made after switching to VectorChord require an image containing VectorChord to restore successfully.
#### Do I still need pgvecto.rs installed after migrating to VectorChord?
pgvecto.rs only needs to be available during the migration, or if you need to restore from a backup that used pgvecto.rs. For a leaner DB and a smaller image, you can optionally switch to an image variant that doesnt have pgvecto.rs installed after youve performed the migration and started Immich: `ghcr.io/immich-app/postgres:14-vectorchord0.4.3`, changing the PostgreSQL version as appropriate.
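In Compose terms, that later switch is a one-line tag change; a sketch, assuming PostgreSQL 14:
```diff
   database:
-    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0
+    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3
```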
#### Why does it matter whether my database is on an SSD or an HDD?
These storage mediums have different performance characteristics. As a result, the optimal settings for an SSD are not the same as those for an HDD. Either configuration is compatible with SSD and HDD, but using the right configuration will make Immich snappier. As a general tip, we recommend users store the database on an SSD whenever possible.
#### Can I use the new database image as a general PostgreSQL image outside of Immich?
Its a standard PostgreSQL container image that additionally contains the VectorChord, pgvector, and (optionally) pgvecto.rs extensions. If you were using the previous pgvecto.rs image for other purposes, you can similarly do so with this image.
#### If pgvecto.rs and pgvector still work, why should I switch to VectorChord?
VectorChord is faster, more stable, uses less RAM, and (with the settings Immich uses) offers higher-quality results than pgvector and pgvecto.rs. This translates to better search and facial recognition experiences. In addition, pgvecto.rs support will be dropped in the future, so changing it sooner will avoid disruption.

20545
docs/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -7,8 +7,7 @@
"format": "prettier --check .",
"format:fix": "prettier --write .",
"start": "docusaurus start --port 3005",
"copy:openapi": "jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"build": "npm run copy:openapi && docusaurus build",
"build": "docusaurus build",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",
"clear": "docusaurus clear",

View File

@@ -16,9 +16,6 @@ import {
mdiCloudKeyOutline,
mdiRegex,
mdiCodeJson,
mdiClockOutline,
mdiAccountOutline,
mdiRestart,
} from '@mdi/js';
import Layout from '@theme/Layout';
import React from 'react';
@@ -29,42 +26,6 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
const items: Item[] = [
{
icon: mdiClockOutline,
iconColor: 'gray',
title: 'setTimeout is cursed',
description:
'The setTimeout method in JavaScript is cursed when used with small values because the implementation may or may not actually wait the specified time.',
link: {
url: 'https://github.com/immich-app/immich/pull/20655',
text: '#20655',
},
date: new Date(2025, 7, 4),
},
{
icon: mdiAccountOutline,
iconColor: '#DAB1DA',
title: 'PostgreSQL USER is cursed',
description:
'The USER keyword in PostgreSQL is cursed because you can select from it like a table, which leads to confusion if you have a table name user as well.',
link: {
url: 'https://github.com/immich-app/immich/pull/19891',
text: '#19891',
},
date: new Date(2025, 7, 4),
},
{
icon: mdiRestart,
iconColor: '#8395e3',
title: 'PostgreSQL RESET is cursed',
description:
'PostgreSQL RESET is cursed because it is impossible to RESET a PostgreSQL extension parameter if the extension has been uninstalled.',
link: {
url: 'https://github.com/immich-app/immich/pull/19363',
text: '#19363',
},
date: new Date(2025, 5, 20),
},
{
icon: mdiRegex,
iconColor: 'purple',

View File

@@ -1,12 +1,4 @@
[
{
"label": "v1.138.1",
"url": "https://v1.138.1.archive.immich.app"
},
{
"label": "v1.138.0",
"url": "https://v1.138.0.archive.immich.app"
},
{
"label": "v1.137.3",
"url": "https://v1.137.3.archive.immich.app"

1
e2e/.gitignore vendored
View File

@@ -3,4 +3,3 @@ node_modules/
/playwright-report/
/blob-report/
/playwright/.cache/
/dist

7419
e2e/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.138.1",
"version": "1.137.3",
"description": "",
"main": "index.js",
"type": "module",

View File

@@ -683,7 +683,7 @@ describe('/albums', () => {
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ role: AlbumUserRole.Editor });
expect(status).toBe(204);
expect(status).toBe(200);
// Get album to verify the role change
const { body } = await request(app)

View File

@@ -555,7 +555,7 @@ describe('/asset', () => {
expect(body).toMatchObject({ id: user1Assets[0].id, livePhotoVideoId: null });
});
it.skip('should update date time original when sidecar file contains DateTimeOriginal', async () => {
it('should update date time original when sidecar file contains DateTimeOriginal', async () => {
const sidecarData = `<?xpacket begin='?' id='W5M0MpCehiHzreSzNTczkc9d'?>
<x:xmpmeta xmlns:x='adobe:ns:meta/' x:xmptk='Image::ExifTool 12.40'>
<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'>
@@ -854,30 +854,6 @@ describe('/asset', () => {
});
});
describe('PUT /assets', () => {
it('should update date time original relatively', async () => {
const { status, body } = await request(app)
.put(`/assets/`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ ids: [user1Assets[0].id], dateTimeRelative: -1441 });
expect(body).toEqual({});
expect(status).toEqual(204);
const result = await request(app)
.get(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send();
expect(result.body).toMatchObject({
id: user1Assets[0].id,
exifInfo: expect.objectContaining({
dateTimeOriginal: '2023-11-19T01:10:00+00:00',
}),
});
});
});
describe('POST /assets', () => {
beforeAll(setupTests, 30_000);

View File

@@ -116,7 +116,7 @@ describe('/partners', () => {
.delete(`/partners/${user3.userId}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(204);
expect(status).toBe(200);
});
it('should throw a bad request if partner not found', async () => {

View File

@@ -485,7 +485,7 @@ describe('/shared-links', () => {
.delete(`/shared-links/${linkWithAlbum.id}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(204);
expect(status).toBe(200);
});
});
});

View File

@@ -304,7 +304,7 @@ describe('/users', () => {
const { status } = await request(app)
.delete(`/users/me/license`)
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(204);
expect(status).toBe(200);
});
});
});

View File

@@ -79,7 +79,7 @@ export const tempDir = tmpdir();
export const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer ${accessToken}` });
export const asKeyAuth = (key: string) => ({ 'x-api-key': key });
export const immichCli = (args: string[]) =>
executeCommand('pnpm', ['exec', 'immich', '-d', `/${tempDir}/immich/`, ...args], { cwd: '../cli' }).promise;
executeCommand('node', ['node_modules/.bin/immich', '-d', `/${tempDir}/immich/`, ...args]).promise;
export const immichAdmin = (args: string[]) =>
executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', `immich-admin ${args.join(' ')}`]);
export const specialCharStrings = ["'", '"', ',', '{', '}', '*'];

View File

@@ -28,9 +28,6 @@
"add_to_album": "Add to album",
"add_to_album_bottom_sheet_added": "Added to {album}",
"add_to_album_bottom_sheet_already_exists": "Already in {album}",
"add_to_album_toggle": "Toggle selection for {album}",
"add_to_albums": "Add to albums",
"add_to_albums_count": "Add to albums ({count})",
"add_to_shared_album": "Add to shared album",
"add_url": "Add URL",
"added_to_archive": "Added to archive",
@@ -358,9 +355,6 @@
"trash_number_of_days_description": "Number of days to keep the assets in trash before permanently removing them",
"trash_settings": "Trash Settings",
"trash_settings_description": "Manage trash settings",
"unlink_all_oauth_accounts": "Unlink all OAuth accounts",
"unlink_all_oauth_accounts_description": "Remember to unlink all OAuth accounts before migrating to a new provider.",
"unlink_all_oauth_accounts_prompt": "Are you sure you want to unlink all OAuth accounts? This will reset the OAuth ID for each user and cannot be undone.",
"user_cleanup_job": "User cleanup",
"user_delete_delay": "<b>{user}</b>'s account and assets will be scheduled for permanent deletion in {delay, plural, one {# day} other {# days}}.",
"user_delete_delay_settings": "Delete delay",
@@ -500,9 +494,7 @@
"assets": "Assets",
"assets_added_count": "Added {count, plural, one {# asset} other {# assets}}",
"assets_added_to_album_count": "Added {count, plural, one {# asset} other {# assets}} to the album",
"assets_added_to_albums_count": "Added {assetTotal} assets to {albumTotal} albums",
"assets_cannot_be_added_to_album_count": "{count, plural, one {Asset} other {Assets}} cannot be added to the album",
"assets_cannot_be_added_to_albums": "{count, plural, one {Asset} other {Assets}} cannot be added to any of the albums",
"assets_count": "{count, plural, one {# asset} other {# assets}}",
"assets_deleted_permanently": "{count} asset(s) deleted permanently",
"assets_deleted_permanently_from_server": "{count} asset(s) deleted permanently from the Immich server",
@@ -519,7 +511,6 @@
"assets_trashed_count": "Trashed {count, plural, one {# asset} other {# assets}}",
"assets_trashed_from_server": "{count} asset(s) trashed from the Immich server",
"assets_were_part_of_album_count": "{count, plural, one {Asset was} other {Assets were}} already part of the album",
"assets_were_part_of_albums_count": "{count, plural, one {Asset was} other {Assets were}} already part of the albums",
"authorized_devices": "Authorized Devices",
"automatic_endpoint_switching_subtitle": "Connect locally over designated Wi-Fi when available and use alternative connections elsewhere",
"automatic_endpoint_switching_title": "Automatic URL switching",
@@ -758,7 +749,6 @@
"date_of_birth_saved": "Date of birth saved successfully",
"date_range": "Date range",
"day": "Day",
"days": "Days",
"deduplicate_all": "Deduplicate All",
"deduplication_criteria_1": "Image size in bytes",
"deduplication_criteria_2": "Count of EXIF data",
@@ -843,12 +833,10 @@
"edit": "Edit",
"edit_album": "Edit album",
"edit_avatar": "Edit avatar",
"edit_birthday": "Edit birthday",
"edit_birthday": "Edit Birthday",
"edit_date": "Edit date",
"edit_date_and_time": "Edit date and time",
"edit_date_and_time_action_prompt": "{count} date and time edited",
"edit_date_and_time_by_offset": "Change date by offset",
"edit_date_and_time_by_offset_interval": "New date range: {from} - {to}",
"edit_description": "Edit description",
"edit_description_prompt": "Please select a new description:",
"edit_exclusion_pattern": "Edit exclusion pattern",
@@ -921,7 +909,6 @@
"failed_to_load_notifications": "Failed to load notifications",
"failed_to_load_people": "Failed to load people",
"failed_to_remove_product_key": "Failed to remove product key",
"failed_to_reset_pin_code": "Failed to reset PIN code",
"failed_to_stack_assets": "Failed to stack assets",
"failed_to_unstack_assets": "Failed to un-stack assets",
"failed_to_update_notification_status": "Failed to update notification status",
@@ -930,7 +917,6 @@
"paths_validation_failed": "{paths, plural, one {# path} other {# paths}} failed validation",
"profile_picture_transparent_pixels": "Profile pictures cannot have transparent pixels. Please zoom in and/or move the image.",
"quota_higher_than_disk_size": "You set a quota higher than the disk size",
"something_went_wrong": "Something went wrong",
"unable_to_add_album_users": "Unable to add users to album",
"unable_to_add_assets_to_shared_link": "Unable to add assets to shared link",
"unable_to_add_comment": "Unable to add comment",
@@ -1021,6 +1007,9 @@
"exif_bottom_sheet_location": "LOCATION",
"exif_bottom_sheet_people": "PEOPLE",
"exif_bottom_sheet_person_add_person": "Add name",
"exif_bottom_sheet_person_age_months": "Age {months} months",
"exif_bottom_sheet_person_age_year_months": "Age 1 year, {months} months",
"exif_bottom_sheet_person_age_years": "Age {years}",
"exit_slideshow": "Exit Slideshow",
"expand_all": "Expand all",
"experimental_settings_new_asset_list_subtitle": "Work in progress",
@@ -1062,13 +1051,11 @@
"filter_people": "Filter people",
"filter_places": "Filter places",
"find_them_fast": "Find them fast by name with search",
"first": "First",
"fix_incorrect_match": "Fix incorrect match",
"folder": "Folder",
"folder_not_found": "Folder not found",
"folders": "Folders",
"folders_feature_description": "Browsing the folder view for the photos and videos on the file system",
"forgot_pin_code_question": "Forgot your PIN?",
"forward": "Forward",
"gcast_enabled": "Google Cast",
"gcast_enabled_description": "This feature loads external resources from Google in order to work.",
@@ -1123,7 +1110,6 @@
"home_page_upload_err_limit": "Can only upload a maximum of 30 assets at a time, skipping",
"host": "Host",
"hour": "Hour",
"hours": "Hours",
"id": "ID",
"idle": "Idle",
"ignore_icloud_photos": "Ignore iCloud photos",
@@ -1184,7 +1170,6 @@
"language_search_hint": "Search languages...",
"language_setting_description": "Select your preferred language",
"large_files": "Large Files",
"last": "Last",
"last_seen": "Last seen",
"latest_version": "Latest Version",
"latitude": "Latitude",
@@ -1203,7 +1188,6 @@
"library_page_sort_title": "Album title",
"licenses": "Licenses",
"light": "Light",
"like": "Like",
"like_deleted": "Like deleted",
"link_motion_video": "Link motion video",
"link_to_oauth": "Link to OAuth",
@@ -1314,7 +1298,6 @@
"merged_people_count": "Merged {count, plural, one {# person} other {# people}}",
"minimize": "Minimize",
"minute": "Minute",
"minutes": "Minutes",
"missing": "Missing",
"model": "Model",
"month": "Month",
@@ -1388,7 +1371,6 @@
"oauth": "OAuth",
"official_immich_resources": "Official Immich Resources",
"offline": "Offline",
"offset": "Offset",
"ok": "Ok",
"oldest_first": "Oldest first",
"on_this_device": "On this device",
@@ -1466,9 +1448,6 @@
"permission_onboarding_permission_limited": "Permission limited. To let Immich backup and manage your entire gallery collection, grant photo and video permissions in Settings.",
"permission_onboarding_request": "Immich requires permission to view your photos and videos.",
"person": "Person",
"person_age_months": "{months, plural, one {# month} other {# months}} old",
"person_age_year_months": "1 year, {months, plural, one {# month} other {# months}} old",
"person_age_years": "{years, plural, other {# years}} old",
"person_birthdate": "Born on {date}",
"person_hidden": "{name}{hidden, select, true { (hidden)} other {}}",
"photo_shared_all_users": "Looks like you shared your photos with all users or you don't have any user to share with.",
@@ -1614,9 +1593,6 @@
"reset_password": "Reset password",
"reset_people_visibility": "Reset people visibility",
"reset_pin_code": "Reset PIN code",
"reset_pin_code_description": "If you forgot your PIN code, you can contact the server administrator to reset it",
"reset_pin_code_success": "Successfully reset PIN code",
"reset_pin_code_with_password": "You can always reset your PIN code with your password",
"reset_sqlite": "Reset SQLite Database",
"reset_sqlite_confirmation": "Are you sure you want to reset the SQLite database? You will need to log out and log in again to resync the data",
"reset_sqlite_success": "Successfully reset the SQLite database",
@@ -1865,7 +1841,6 @@
"sort_created": "Date created",
"sort_items": "Number of items",
"sort_modified": "Date modified",
"sort_newest": "Newest photo",
"sort_oldest": "Oldest photo",
"sort_people_by_similarity": "Sort people by similarity",
"sort_recent": "Most recent photo",

View File

@@ -1,6 +1,6 @@
ARG DEVICE=cpu
FROM python:3.11-bookworm@sha256:c642d5dfaf9115a12086785f23008558ae2e13bcd0c4794536340bcb777a4381 AS builder-cpu
FROM python:3.11-bookworm@sha256:ce3b954c9285a7a145cba620bae03db836ab890b6b9e0d05a3ca522ea00dfbc9 AS builder-cpu
FROM builder-cpu AS builder-openvino
@@ -59,7 +59,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
RUN apt-get update && apt-get install -y --no-install-recommends g++
COPY --from=ghcr.io/astral-sh/uv:latest@sha256:f64ad69940b634e75d2e4d799eb5238066c5eeda49f76e782d4873c3d014ea33 /uv /uvx /bin/
COPY --from=ghcr.io/astral-sh/uv:latest@sha256:9653efd4380d5a0e5511e337dcfc3b8ba5bc4e6ea7fa3be7716598261d5503fa /uv /uvx /bin/
RUN --mount=type=cache,target=/root/.cache/uv \
--mount=type=bind,source=uv.lock,target=uv.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
@@ -68,11 +68,11 @@ RUN if [ "$DEVICE" = "rocm" ]; then \
uv pip install /opt/onnxruntime_rocm-*.whl; \
fi
FROM python:3.11-slim-bookworm@sha256:838ff46ae6c481e85e369706fa3dea5166953824124735639f3c9f52af85f319 AS prod-cpu
FROM python:3.11-slim-bookworm@sha256:9e1912aab0a30bbd9488eb79063f68f42a68ab0946cbe98fecf197fe5b085506 AS prod-cpu
ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2
FROM python:3.11-slim-bookworm@sha256:838ff46ae6c481e85e369706fa3dea5166953824124735639f3c9f52af85f319 AS prod-openvino
FROM python:3.11-slim-bookworm@sha256:9e1912aab0a30bbd9488eb79063f68f42a68ab0946cbe98fecf197fe5b085506 AS prod-openvino
RUN apt-get update && \
apt-get install --no-install-recommends -yqq ocl-icd-libopencl1 wget && \

View File

@@ -36,7 +36,7 @@ def to_numpy(img: Image.Image) -> NDArray[np.float32]:
def normalize(
img: NDArray[np.float32], mean: float | NDArray[np.float32], std: float | NDArray[np.float32]
) -> NDArray[np.float32]:
return (img - mean) / std
return np.divide(img - mean, std, dtype=np.float32)
def get_pil_resampling(resample: str) -> Image.Resampling:
@@ -58,13 +58,11 @@ def decode_pil(image_bytes: bytes | IO[bytes] | Image.Image) -> Image.Image:
def decode_cv2(image_bytes: NDArray[np.uint8] | bytes | Image.Image) -> NDArray[np.uint8]:
match image_bytes:
case bytes() | memoryview() | bytearray():
return pil_to_cv2(decode_pil(image_bytes)) # pillow is much faster than cv2
case Image.Image():
return pil_to_cv2(image_bytes)
case _:
return image_bytes
if isinstance(image_bytes, bytes):
image_bytes = decode_pil(image_bytes) # pillow is much faster than cv2
if isinstance(image_bytes, Image.Image):
return pil_to_cv2(image_bytes)
return image_bytes
def clean_text(text: str, canonicalize: bool = False) -> str:

View File

@@ -112,4 +112,8 @@ def has_profiling(obj: Any) -> TypeGuard[HasProfiling]:
return hasattr(obj, "profiling") and isinstance(obj.profiling, dict)
def is_ndarray(obj: Any, dtype: "type[np._DTypeScalar_co]") -> "TypeGuard[npt.NDArray[np._DTypeScalar_co]]":
return isinstance(obj, np.ndarray) and obj.dtype == dtype
T = TypeVar("T")

View File

@@ -12,7 +12,6 @@ dependencies = [
"gunicorn>=21.1.0",
"huggingface-hub>=0.20.1,<1.0",
"insightface>=0.7.3,<1.0",
"numpy<2",
"opencv-python-headless>=4.7.0.72,<5.0",
"orjson>=3.9.5",
"pillow>=9.5.0,<11.0",

773
machine-learning/uv.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -27,7 +27,7 @@
<application android:label="Immich" android:name=".ImmichApp" android:usesCleartextTraffic="true"
android:icon="@mipmap/ic_launcher" android:requestLegacyExternalStorage="true"
android:largeHeap="true" android:enableOnBackInvokedCallback="false" android:allowBackup="false">
android:largeHeap="true" android:enableOnBackInvokedCallback="false">
<service
android:name="androidx.work.impl.foreground.SystemForegroundService"

View File

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 3004,
"android.injected.version.name" => "1.138.1",
"android.injected.version.code" => 3002,
"android.injected.version.name" => "1.137.3",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

View File

@@ -19,7 +19,6 @@ targets:
- lib/infrastructure/entities/*.dart
- lib/infrastructure/entities/*.drift
- lib/infrastructure/repositories/db.repository.dart
- lib/infrastructure/repositories/logger_db.repository.dart
drift_dev:modular:
enabled: true
options: *drift_options

File diff suppressed because one or more lines are too long

View File

@@ -4,7 +4,6 @@ import 'package:easy_localization/easy_localization.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/logger_db.repository.dart';
import 'package:immich_mobile/main.dart' as app;
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
@@ -41,14 +40,16 @@ class ImmichTestHelper {
await EasyLocalization.ensureInitialized();
// Clear all data from Isar (reuse existing instance if available)
final db = await Bootstrap.initIsar();
final logDb = DriftLogger();
await Bootstrap.initDomain(db, logDb);
await Bootstrap.initDomain(db);
await Store.clear();
await db.writeTxn(() => db.clear());
// Load main Widget
await tester.pumpWidget(
ProviderScope(
overrides: [dbProvider.overrideWithValue(db), isarProvider.overrideWithValue(db)],
overrides: [
dbProvider.overrideWithValue(db),
isarProvider.overrideWithValue(db),
],
child: const app.MainWidget(),
),
);
@@ -58,11 +59,18 @@ class ImmichTestHelper {
}
@isTest
void immichWidgetTest(String description, Future<void> Function(WidgetTester, ImmichTestHelper) test) {
testWidgets(description, (widgetTester) async {
await ImmichTestHelper.loadApp(widgetTester);
await test(widgetTester, ImmichTestHelper(widgetTester));
}, semanticsEnabled: false);
void immichWidgetTest(
String description,
Future<void> Function(WidgetTester, ImmichTestHelper) test,
) {
testWidgets(
description,
(widgetTester) async {
await ImmichTestHelper.loadApp(widgetTester);
await test(widgetTester, ImmichTestHelper(widgetTester));
},
semanticsEnabled: false,
);
}
Future<void> pumpUntilFound(
@@ -71,7 +79,8 @@ Future<void> pumpUntilFound(
Duration timeout = const Duration(seconds: 120),
}) async {
bool found = false;
final timer = Timer(timeout, () => throw TimeoutException("Pump until has timed out"));
final timer =
Timer(timeout, () => throw TimeoutException("Pump until has timed out"));
while (found != true) {
await tester.pump();
found = tester.any(finder);

View File

@@ -22,7 +22,7 @@ platform :ios do
path: "./Runner.xcodeproj",
)
increment_version_number(
version_number: "1.138.1"
version_number: "1.137.3"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,

View File

@@ -3,7 +3,7 @@ const double downloadCompleted = -1;
const double downloadFailed = -2;
// Number of log entries to retain on app start
const int kLogTruncateLimit = 2000;
const int kLogTruncateLimit = 250;
// Sync
const int kSyncEventBatchSize = 5000;

View File

@@ -23,7 +23,6 @@ class RemoteAlbum {
final AlbumAssetOrder order;
final int assetCount;
final String ownerName;
final bool isShared;
const RemoteAlbum({
required this.id,
@@ -37,7 +36,6 @@ class RemoteAlbum {
required this.order,
required this.assetCount,
required this.ownerName,
required this.isShared,
});
@override
@@ -54,7 +52,6 @@ class RemoteAlbum {
thumbnailAssetId: ${thumbnailAssetId ?? "<NA>"}
assetCount: $assetCount
ownerName: $ownerName
isShared: $isShared
}''';
}
@@ -72,8 +69,7 @@ class RemoteAlbum {
isActivityEnabled == other.isActivityEnabled &&
order == other.order &&
assetCount == other.assetCount &&
ownerName == other.ownerName &&
isShared == other.isShared;
ownerName == other.ownerName;
}
@override
@@ -88,8 +84,7 @@ class RemoteAlbum {
isActivityEnabled.hashCode ^
order.hashCode ^
assetCount.hashCode ^
ownerName.hashCode ^
isShared.hashCode;
ownerName.hashCode;
}
RemoteAlbum copyWith({
@@ -104,7 +99,6 @@ class RemoteAlbum {
AlbumAssetOrder? order,
int? assetCount,
String? ownerName,
bool? isShared,
}) {
return RemoteAlbum(
id: id ?? this.id,
@@ -118,7 +112,6 @@ class RemoteAlbum {
order: order ?? this.order,
assetCount: assetCount ?? this.assetCount,
ownerName: ownerName ?? this.ownerName,
isShared: isShared ?? this.isShared,
);
}
}

View File

@@ -1,18 +0,0 @@
import 'package:maplibre_gl/maplibre_gl.dart';
class Marker {
final LatLng location;
final String assetId;
const Marker({required this.location, required this.assetId});
@override
bool operator ==(covariant Marker other) {
if (identical(this, other)) return true;
return other.location == location && other.assetId == assetId;
}
@override
int get hashCode => location.hashCode ^ assetId.hashCode;
}

View File

@@ -6,6 +6,7 @@ import 'package:immich_mobile/infrastructure/repositories/local_album.repository
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:immich_mobile/presentation/pages/dev/dev_logger.dart';
import 'package:logging/logging.dart';
class HashService {
@@ -45,6 +46,7 @@ class HashService {
stopwatch.stop();
_log.info("Hashing took - ${stopwatch.elapsedMilliseconds}ms");
DLog.log("Hashing took - ${stopwatch.elapsedMilliseconds}ms");
}
/// Processes a list of [LocalAsset]s, storing their hash and updating the assets in the DB
@@ -99,6 +101,7 @@ class HashService {
}
_log.fine("Hashed ${hashed.length}/${toHash.length} assets");
DLog.log("Hashed ${hashed.length}/${toHash.length} assets");
await _localAssetRepository.updateHashes(hashed);
await _storageRepository.clearCache();

View File

@@ -6,6 +6,7 @@ import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:immich_mobile/presentation/pages/dev/dev_logger.dart';
import 'package:immich_mobile/utils/diff.dart';
import 'package:logging/logging.dart';
import 'package:platform/platform.dart';
@@ -29,17 +30,19 @@ class LocalSyncService {
try {
if (full || await _nativeSyncApi.shouldFullSync()) {
_log.fine("Full sync request from ${full ? "user" : "native"}");
DLog.log("Full sync request from ${full ? "user" : "native"}");
return await fullSync();
}
final delta = await _nativeSyncApi.getMediaChanges();
if (!delta.hasChanges) {
_log.fine("No media changes detected. Skipping sync");
DLog.log("No media changes detected. Skipping sync");
return;
}
_log.fine("Delta updated: ${delta.updates.length}");
_log.fine("Delta deleted: ${delta.deletes.length}");
DLog.log("Delta updated: ${delta.updates.length}");
DLog.log("Delta deleted: ${delta.deletes.length}");
final deviceAlbums = await _nativeSyncApi.getAlbums();
await _localAlbumRepository.updateAll(deviceAlbums.toLocalAlbums());
@@ -80,6 +83,7 @@ class LocalSyncService {
} finally {
stopwatch.stop();
_log.info("Device sync took - ${stopwatch.elapsedMilliseconds}ms");
DLog.log("Device sync took - ${stopwatch.elapsedMilliseconds}ms");
}
}
@@ -102,6 +106,7 @@ class LocalSyncService {
await _nativeSyncApi.checkpointSync();
stopwatch.stop();
_log.info("Full device sync took - ${stopwatch.elapsedMilliseconds}ms");
DLog.log("Full device sync took - ${stopwatch.elapsedMilliseconds}ms");
} catch (e, s) {
_log.severe("Error performing full device sync", e, s);
}
@@ -145,6 +150,7 @@ class LocalSyncService {
// Faster path - only new assets added
if (await checkAddition(dbAlbum, deviceAlbum)) {
_log.fine("Fast synced device album ${dbAlbum.name}");
DLog.log("Fast synced device album ${dbAlbum.name}");
return true;
}
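
The sync paths above all share the same shape: start a Stopwatch, run the work, log the elapsed milliseconds in a finally block. A generic sketch of that pattern (names illustrative):

// Runs [body], then reports its duration through [log] even on failure.
Future<T> timed<T>(
  String label,
  Future<T> Function() body,
  void Function(String message) log,
) async {
  final stopwatch = Stopwatch()..start();
  try {
    return await body();
  } finally {
    stopwatch.stop();
    log('$label took - ${stopwatch.elapsedMilliseconds}ms');
  }
}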

View File

@@ -14,7 +14,7 @@ import 'package:logging/logging.dart';
/// writes them to a persistent [ILogRepository], and manages log levels
/// via [IStoreRepository]
class LogService {
final LogRepository _logRepository;
final IsarLogRepository _logRepository;
final IsarStoreRepository _storeRepository;
final List<LogMessage> _msgBuffer = [];
@@ -37,7 +37,7 @@ class LogService {
}
static Future<LogService> init({
required LogRepository logRepository,
required IsarLogRepository logRepository,
required IsarStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
@@ -50,7 +50,7 @@ class LogService {
}
static Future<LogService> create({
required LogRepository logRepository,
required IsarLogRepository logRepository,
required IsarStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
@@ -85,7 +85,7 @@ class LogService {
if (_shouldBuffer) {
_msgBuffer.add(record);
_flushTimer ??= Timer(const Duration(seconds: 5), () => unawaited(_flushBuffer()));
_flushTimer ??= Timer(const Duration(seconds: 5), () => unawaited(flushBuffer()));
} else {
unawaited(_logRepository.insert(record));
}
@@ -108,18 +108,20 @@ class LogService {
await _logRepository.deleteAll();
}
Future<void> flush() {
void flush() {
_flushTimer?.cancel();
return _flushBuffer();
// TODO: Re-enable this after moving to sqlite - #16504
// await _flushBufferToDatabase();
}
Future<void> dispose() {
_flushTimer?.cancel();
_logSubscription.cancel();
return _flushBuffer();
return flushBuffer();
}
Future<void> _flushBuffer() async {
// TODO: Move this to private once Isar is removed
Future<void> flushBuffer() async {
_flushTimer = null;
final buffer = [..._msgBuffer];
_msgBuffer.clear();
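
The buffering logic here batches messages and schedules a single deferred flush; a stripped-down sketch of the same pattern (BufferedSink and write are illustrative, not the service's API):

import 'dart:async';

// Collects messages and flushes them in one batch after five seconds.
class BufferedSink {
  BufferedSink(this.write);

  final Future<void> Function(List<String> batch) write;
  final List<String> _buffer = [];
  Timer? _flushTimer;

  void add(String message) {
    _buffer.add(message);
    // Schedule a flush only if one is not already pending.
    _flushTimer ??= Timer(const Duration(seconds: 5), () => unawaited(flush()));
  }

  Future<void> flush() async {
    _flushTimer?.cancel();
    _flushTimer = null;
    final batch = [..._buffer];
    _buffer.clear();
    if (batch.isNotEmpty) {
      await write(batch);
    }
  }
}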

View File

@@ -1,23 +0,0 @@
import 'package:immich_mobile/domain/models/map.model.dart';
import 'package:immich_mobile/infrastructure/repositories/map.repository.dart';
import 'package:maplibre_gl/maplibre_gl.dart';
typedef MapMarkerSource = Future<List<Marker>> Function(LatLngBounds? bounds);
typedef MapQuery = ({MapMarkerSource markerSource});
class MapFactory {
final DriftMapRepository _mapRepository;
const MapFactory({required DriftMapRepository mapRepository}) : _mapRepository = mapRepository;
MapService remote(String ownerId) => MapService(_mapRepository.remote(ownerId));
}
class MapService {
final MapMarkerSource _markerSource;
MapService(MapQuery query) : _markerSource = query.markerSource;
Future<List<Marker>> Function(LatLngBounds? bounds) get getMarkers => _markerSource;
}
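
MapService depends only on a function type rather than on the repository itself, which makes stubbing trivial; a sketch of the idiom (all names illustrative; the real code binds a repository method instead):

// The service consumes any function matching the typedef, so tests can
// inject a stub instead of a database-backed source.
typedef MarkerSource = Future<List<String>> Function(Object? bounds);

class MarkerService {
  MarkerService(this._source);
  final MarkerSource _source;

  Future<List<String>> markersIn(Object? bounds) => _source(bounds);
}

Future<void> main() async {
  final service = MarkerService((bounds) async => ['asset-1', 'asset-2']);
  print(await service.markersIn(null));
}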

View File

@@ -1,12 +1,12 @@
import 'dart:async';
import 'package:collection/collection.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
import 'package:immich_mobile/infrastructure/repositories/remote_album.repository.dart';
import 'package:immich_mobile/models/albums/album_search.model.dart';
import 'package:immich_mobile/repositories/drift_album_api_repository.dart';
import 'package:immich_mobile/utils/remote_album.utils.dart';
class RemoteAlbumService {
final DriftRemoteAlbumRepository _repository;
@@ -26,21 +26,8 @@ class RemoteAlbumService {
return _repository.get(albumId);
}
Future<List<RemoteAlbum>> sortAlbums(
List<RemoteAlbum> albums,
RemoteAlbumSortMode sortMode, {
bool isReverse = false,
}) async {
final List<RemoteAlbum> sorted = switch (sortMode) {
RemoteAlbumSortMode.created => albums.sortedBy((album) => album.createdAt),
RemoteAlbumSortMode.title => albums.sortedBy((album) => album.name),
RemoteAlbumSortMode.lastModified => albums.sortedBy((album) => album.updatedAt),
RemoteAlbumSortMode.assetCount => albums.sortedBy((album) => album.assetCount),
RemoteAlbumSortMode.mostRecent => await _sortByNewestAsset(albums),
RemoteAlbumSortMode.mostOldest => await _sortByOldestAsset(albums),
};
return (isReverse ? sorted.reversed : sorted).toList();
List<RemoteAlbum> sortAlbums(List<RemoteAlbum> albums, RemoteAlbumSortMode sortMode, {bool isReverse = false}) {
return sortMode.sortFn(albums, isReverse);
}
List<RemoteAlbum> searchAlbums(
@@ -156,60 +143,4 @@ class RemoteAlbumService {
Future<int> getCount() {
return _repository.getCount();
}
Future<List<RemoteAlbum>> _sortByNewestAsset(List<RemoteAlbum> albums) async {
// map album IDs to their newest asset dates
final Map<String, Future<DateTime?>> assetTimestampFutures = {};
for (final album in albums) {
assetTimestampFutures[album.id] = _repository.getNewestAssetTimestamp(album.id);
}
// await all database queries
final entries = await Future.wait(
assetTimestampFutures.entries.map((entry) async => MapEntry(entry.key, await entry.value)),
);
final assetTimestamps = Map.fromEntries(entries);
final sorted = albums.sorted((a, b) {
final aDate = assetTimestamps[a.id] ?? DateTime.fromMillisecondsSinceEpoch(0);
final bDate = assetTimestamps[b.id] ?? DateTime.fromMillisecondsSinceEpoch(0);
return aDate.compareTo(bDate);
});
return sorted;
}
Future<List<RemoteAlbum>> _sortByOldestAsset(List<RemoteAlbum> albums) async {
// map album IDs to their oldest asset dates
final Map<String, Future<DateTime?>> assetTimestampFutures = {
for (final album in albums) album.id: _repository.getOldestAssetTimestamp(album.id),
};
// await all database queries
final entries = await Future.wait(
assetTimestampFutures.entries.map((entry) async => MapEntry(entry.key, await entry.value)),
);
final assetTimestamps = Map.fromEntries(entries);
final sorted = albums.sorted((a, b) {
final aDate = assetTimestamps[a.id] ?? DateTime.fromMillisecondsSinceEpoch(0);
final bDate = assetTimestamps[b.id] ?? DateTime.fromMillisecondsSinceEpoch(0);
return aDate.compareTo(bDate);
});
return sorted.reversed.toList();
}
}
enum RemoteAlbumSortMode {
title("library_page_sort_title"),
assetCount("library_page_sort_asset_count"),
lastModified("library_page_sort_last_modified"),
created("library_page_sort_created"),
mostRecent("sort_newest"),
mostOldest("sort_oldest");
final String key;
const RemoteAlbumSortMode(this.key);
}
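
The compact variant of sortAlbums delegates to the enum via sortMode.sortFn(albums, isReverse); a sketch of what such an enhanced enum can look like (Album, the modes, and the comparators are illustrative; assumes package:collection):

import 'package:collection/collection.dart';

class Album {
  const Album(this.name, this.createdAt);
  final String name;
  final DateTime createdAt;
}

// Enhanced enum carrying its own sort implementation (Dart 2.17+).
enum AlbumSortMode {
  title,
  created;

  List<Album> sortFn(List<Album> albums, bool isReverse) {
    final sorted = switch (this) {
      AlbumSortMode.title => albums.sortedBy((album) => album.name),
      AlbumSortMode.created => albums.sortedBy((album) => album.createdAt),
    };
    return isReverse ? sorted.reversed.toList() : sorted;
  }
}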

View File

@@ -3,6 +3,7 @@ import 'dart:async';
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_stream.repository.dart';
import 'package:immich_mobile/presentation/pages/dev/dev_logger.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';
@@ -25,6 +26,7 @@ class SyncStreamService {
Future<void> sync() {
_logger.info("Remote sync request for user");
DLog.log("Remote sync request for user");
// Start the sync stream and handle events
return _syncApiRepository.streamChanges(_handleEvents);
}

View File

@@ -10,7 +10,6 @@ import 'package:immich_mobile/domain/services/setting.service.dart';
import 'package:immich_mobile/domain/utils/event_stream.dart';
import 'package:immich_mobile/infrastructure/repositories/timeline.repository.dart';
import 'package:immich_mobile/utils/async_mutex.dart';
import 'package:maplibre_gl/maplibre_gl.dart';
typedef TimelineAssetSource = Future<List<BaseAsset>> Function(int index, int count);
@@ -58,8 +57,6 @@ class TimelineFactory {
TimelineService(_timelineRepository.person(userId, personId, groupBy));
TimelineService fromAssets(List<BaseAsset> assets) => TimelineService(_timelineRepository.fromAssets(assets));
TimelineService map(LatLngBounds bounds) => TimelineService(_timelineRepository.map(bounds, groupBy));
}
class TimelineService {
@@ -169,36 +166,6 @@ class TimelineService {
return _buffer.elementAt(index - _bufferOffset);
}
/// Gets an asset at the given index, automatically loading the buffer if needed.
/// This is an async version that can handle out-of-range indices by loading the appropriate buffer.
Future<BaseAsset?> getAssetAsync(int index) async {
if (index < 0 || index >= _totalAssets) {
return null;
}
if (hasRange(index, 1)) {
return _buffer.elementAt(index - _bufferOffset);
}
// Load the buffer containing the requested index
try {
final assets = await loadAssets(index, 1);
return assets.isNotEmpty ? assets.first : null;
} catch (e) {
return null;
}
}
/// Safely gets an asset at the given index without throwing a RangeError.
/// Returns null if the index is out of bounds or not currently in the buffer.
/// For automatic buffer loading, use getAssetAsync instead.
BaseAsset? getAssetSafe(int index) {
if (index < 0 || index >= _totalAssets || !hasRange(index, 1)) {
return null;
}
return _buffer.elementAt(index - _bufferOffset);
}
Future<void> dispose() async {
await _bucketSubscription?.cancel();
_bucketSubscription = null;
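
The removed getAssetAsync implemented a windowed-buffer lookup: serve the index from the in-memory buffer when it falls inside the window, otherwise load a fresh window around it. A generic sketch of that idea (names and window size are illustrative):

// Serves elements by global index from a sliding in-memory window.
class Window<T> {
  Window(this.loadRange, {this.size = 64});

  final Future<List<T>> Function(int start, int count) loadRange;
  final int size;
  List<T> _buffer = [];
  int _offset = 0;

  bool contains(int index) => index >= _offset && index < _offset + _buffer.length;

  Future<T?> at(int index) async {
    if (index < 0) return null;
    if (!contains(index)) {
      // Reload the window starting at the requested index.
      _buffer = await loadRange(index, size);
      _offset = index;
    }
    final local = index - _offset;
    return local < _buffer.length ? _buffer[local] : null;
  }
}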

View File

@@ -95,7 +95,6 @@ class ExifInfo {
);
}
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_lat_lng ON remote_exif_entity (latitude, longitude)')
class RemoteExifEntity extends Table with DriftDefaultsMixin {
const RemoteExifEntity();

View File

@@ -699,10 +699,6 @@ typedef $$RemoteExifEntityTableProcessedTableManager =
i1.RemoteExifEntityData,
i0.PrefetchHooks Function({bool assetId})
>;
i0.Index get idxLatLng => i0.Index(
'idx_lat_lng',
'CREATE INDEX IF NOT EXISTS idx_lat_lng ON remote_exif_entity (latitude, longitude)',
);
class $RemoteExifEntityTable extends i2.RemoteExifEntity
with i0.TableInfo<$RemoteExifEntityTable, i1.RemoteExifEntityData> {

View File

@@ -4,7 +4,7 @@ import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.d
import 'package:immich_mobile/infrastructure/utils/asset.mixin.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_local_asset_checksum ON local_asset_entity (checksum)')
@TableIndex(name: 'idx_local_asset_checksum', columns: {#checksum})
class LocalAssetEntity extends Table with DriftDefaultsMixin, AssetEntityMixin {
const LocalAssetEntity();
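
This change swaps between drift's two index declarations: the structured @TableIndex annotation and the raw-SQL form, which can spell out IF NOT EXISTS. A sketch showing both side by side (NoteEntity and the index name are illustrative):

import 'package:drift/drift.dart';

// Structured form: drift derives the CREATE INDEX statement.
@TableIndex(name: 'idx_note_title', columns: {#title})
// Raw-SQL alternative:
// @TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_note_title ON note_entity (title)')
class NoteEntity extends Table {
  IntColumn get id => integer().autoIncrement()();
  TextColumn get title => text()();
}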

View File

@@ -338,7 +338,7 @@ typedef $$LocalAssetEntityTableProcessedTableManager =
>;
i0.Index get idxLocalAssetChecksum => i0.Index(
'idx_local_asset_checksum',
'CREATE INDEX IF NOT EXISTS idx_local_asset_checksum ON local_asset_entity (checksum)',
'CREATE INDEX idx_local_asset_checksum ON local_asset_entity (checksum)',
);
class $LocalAssetEntityTable extends i3.LocalAssetEntity

View File

@@ -1,29 +1,47 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.drift.dart';
import 'package:immich_mobile/domain/models/log.model.dart' as domain;
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:isar/isar.dart';
class LogMessageEntity extends Table {
const LogMessageEntity();
part 'log.entity.g.dart';
@override
String get tableName => 'logger_messages';
@Collection(inheritance: false)
class LoggerMessage {
final Id id = Isar.autoIncrement;
final String message;
final String? details;
@Enumerated(EnumType.ordinal)
final LogLevel level;
final DateTime createdAt;
final String? context1;
final String? context2;
IntColumn get id => integer().autoIncrement()();
TextColumn get message => text()();
TextColumn get details => text().nullable()();
IntColumn get level => intEnum<domain.LogLevel>()();
DateTimeColumn get createdAt => dateTime()();
TextColumn get logger => text().nullable()();
TextColumn get stack => text().nullable()();
}
extension LogMessageEntityDataDomainEx on LogMessageEntityData {
domain.LogMessage toDto() => domain.LogMessage(
message: message,
level: level,
createdAt: createdAt,
logger: logger,
error: details,
stack: stack,
);
const LoggerMessage({
required this.message,
required this.details,
this.level = LogLevel.info,
required this.createdAt,
required this.context1,
required this.context2,
});
LogMessage toDto() {
return LogMessage(
message: message,
level: level,
createdAt: createdAt,
logger: context1,
error: details,
stack: context2,
);
}
static LoggerMessage fromDto(LogMessage log) {
return LoggerMessage(
message: log.message,
details: log.error,
level: log.level,
createdAt: log.createdAt,
context1: log.logger,
context2: log.stack,
);
}
}

View File

@@ -1,697 +0,0 @@
// dart format width=80
// ignore_for_file: type=lint
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/log.entity.drift.dart'
as i1;
import 'package:immich_mobile/domain/models/log.model.dart' as i2;
import 'package:immich_mobile/infrastructure/entities/log.entity.dart' as i3;
typedef $$LogMessageEntityTableCreateCompanionBuilder =
i1.LogMessageEntityCompanion Function({
i0.Value<int> id,
required String message,
i0.Value<String?> details,
required i2.LogLevel level,
required DateTime createdAt,
i0.Value<String?> logger,
i0.Value<String?> stack,
});
typedef $$LogMessageEntityTableUpdateCompanionBuilder =
i1.LogMessageEntityCompanion Function({
i0.Value<int> id,
i0.Value<String> message,
i0.Value<String?> details,
i0.Value<i2.LogLevel> level,
i0.Value<DateTime> createdAt,
i0.Value<String?> logger,
i0.Value<String?> stack,
});
class $$LogMessageEntityTableFilterComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LogMessageEntityTable> {
$$LogMessageEntityTableFilterComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnFilters<int> get id => $composableBuilder(
column: $table.id,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnFilters<String> get message => $composableBuilder(
column: $table.message,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnFilters<String> get details => $composableBuilder(
column: $table.details,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnWithTypeConverterFilters<i2.LogLevel, i2.LogLevel, int> get level =>
$composableBuilder(
column: $table.level,
builder: (column) => i0.ColumnWithTypeConverterFilters(column),
);
i0.ColumnFilters<DateTime> get createdAt => $composableBuilder(
column: $table.createdAt,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnFilters<String> get logger => $composableBuilder(
column: $table.logger,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnFilters<String> get stack => $composableBuilder(
column: $table.stack,
builder: (column) => i0.ColumnFilters(column),
);
}
class $$LogMessageEntityTableOrderingComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LogMessageEntityTable> {
$$LogMessageEntityTableOrderingComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnOrderings<int> get id => $composableBuilder(
column: $table.id,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<String> get message => $composableBuilder(
column: $table.message,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<String> get details => $composableBuilder(
column: $table.details,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<int> get level => $composableBuilder(
column: $table.level,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<DateTime> get createdAt => $composableBuilder(
column: $table.createdAt,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<String> get logger => $composableBuilder(
column: $table.logger,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<String> get stack => $composableBuilder(
column: $table.stack,
builder: (column) => i0.ColumnOrderings(column),
);
}
class $$LogMessageEntityTableAnnotationComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LogMessageEntityTable> {
$$LogMessageEntityTableAnnotationComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.GeneratedColumn<int> get id =>
$composableBuilder(column: $table.id, builder: (column) => column);
i0.GeneratedColumn<String> get message =>
$composableBuilder(column: $table.message, builder: (column) => column);
i0.GeneratedColumn<String> get details =>
$composableBuilder(column: $table.details, builder: (column) => column);
i0.GeneratedColumnWithTypeConverter<i2.LogLevel, int> get level =>
$composableBuilder(column: $table.level, builder: (column) => column);
i0.GeneratedColumn<DateTime> get createdAt =>
$composableBuilder(column: $table.createdAt, builder: (column) => column);
i0.GeneratedColumn<String> get logger =>
$composableBuilder(column: $table.logger, builder: (column) => column);
i0.GeneratedColumn<String> get stack =>
$composableBuilder(column: $table.stack, builder: (column) => column);
}
class $$LogMessageEntityTableTableManager
extends
i0.RootTableManager<
i0.GeneratedDatabase,
i1.$LogMessageEntityTable,
i1.LogMessageEntityData,
i1.$$LogMessageEntityTableFilterComposer,
i1.$$LogMessageEntityTableOrderingComposer,
i1.$$LogMessageEntityTableAnnotationComposer,
$$LogMessageEntityTableCreateCompanionBuilder,
$$LogMessageEntityTableUpdateCompanionBuilder,
(
i1.LogMessageEntityData,
i0.BaseReferences<
i0.GeneratedDatabase,
i1.$LogMessageEntityTable,
i1.LogMessageEntityData
>,
),
i1.LogMessageEntityData,
i0.PrefetchHooks Function()
> {
$$LogMessageEntityTableTableManager(
i0.GeneratedDatabase db,
i1.$LogMessageEntityTable table,
) : super(
i0.TableManagerState(
db: db,
table: table,
createFilteringComposer: () =>
i1.$$LogMessageEntityTableFilterComposer($db: db, $table: table),
createOrderingComposer: () => i1
.$$LogMessageEntityTableOrderingComposer($db: db, $table: table),
createComputedFieldComposer: () =>
i1.$$LogMessageEntityTableAnnotationComposer(
$db: db,
$table: table,
),
updateCompanionCallback:
({
i0.Value<int> id = const i0.Value.absent(),
i0.Value<String> message = const i0.Value.absent(),
i0.Value<String?> details = const i0.Value.absent(),
i0.Value<i2.LogLevel> level = const i0.Value.absent(),
i0.Value<DateTime> createdAt = const i0.Value.absent(),
i0.Value<String?> logger = const i0.Value.absent(),
i0.Value<String?> stack = const i0.Value.absent(),
}) => i1.LogMessageEntityCompanion(
id: id,
message: message,
details: details,
level: level,
createdAt: createdAt,
logger: logger,
stack: stack,
),
createCompanionCallback:
({
i0.Value<int> id = const i0.Value.absent(),
required String message,
i0.Value<String?> details = const i0.Value.absent(),
required i2.LogLevel level,
required DateTime createdAt,
i0.Value<String?> logger = const i0.Value.absent(),
i0.Value<String?> stack = const i0.Value.absent(),
}) => i1.LogMessageEntityCompanion.insert(
id: id,
message: message,
details: details,
level: level,
createdAt: createdAt,
logger: logger,
stack: stack,
),
withReferenceMapper: (p0) => p0
.map((e) => (e.readTable(table), i0.BaseReferences(db, table, e)))
.toList(),
prefetchHooksCallback: null,
),
);
}
typedef $$LogMessageEntityTableProcessedTableManager =
i0.ProcessedTableManager<
i0.GeneratedDatabase,
i1.$LogMessageEntityTable,
i1.LogMessageEntityData,
i1.$$LogMessageEntityTableFilterComposer,
i1.$$LogMessageEntityTableOrderingComposer,
i1.$$LogMessageEntityTableAnnotationComposer,
$$LogMessageEntityTableCreateCompanionBuilder,
$$LogMessageEntityTableUpdateCompanionBuilder,
(
i1.LogMessageEntityData,
i0.BaseReferences<
i0.GeneratedDatabase,
i1.$LogMessageEntityTable,
i1.LogMessageEntityData
>,
),
i1.LogMessageEntityData,
i0.PrefetchHooks Function()
>;
class $LogMessageEntityTable extends i3.LogMessageEntity
with i0.TableInfo<$LogMessageEntityTable, i1.LogMessageEntityData> {
@override
final i0.GeneratedDatabase attachedDatabase;
final String? _alias;
$LogMessageEntityTable(this.attachedDatabase, [this._alias]);
static const i0.VerificationMeta _idMeta = const i0.VerificationMeta('id');
@override
late final i0.GeneratedColumn<int> id = i0.GeneratedColumn<int>(
'id',
aliasedName,
false,
hasAutoIncrement: true,
type: i0.DriftSqlType.int,
requiredDuringInsert: false,
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
'PRIMARY KEY AUTOINCREMENT',
),
);
static const i0.VerificationMeta _messageMeta = const i0.VerificationMeta(
'message',
);
@override
late final i0.GeneratedColumn<String> message = i0.GeneratedColumn<String>(
'message',
aliasedName,
false,
type: i0.DriftSqlType.string,
requiredDuringInsert: true,
);
static const i0.VerificationMeta _detailsMeta = const i0.VerificationMeta(
'details',
);
@override
late final i0.GeneratedColumn<String> details = i0.GeneratedColumn<String>(
'details',
aliasedName,
true,
type: i0.DriftSqlType.string,
requiredDuringInsert: false,
);
@override
late final i0.GeneratedColumnWithTypeConverter<i2.LogLevel, int> level =
i0.GeneratedColumn<int>(
'level',
aliasedName,
false,
type: i0.DriftSqlType.int,
requiredDuringInsert: true,
).withConverter<i2.LogLevel>(i1.$LogMessageEntityTable.$converterlevel);
static const i0.VerificationMeta _createdAtMeta = const i0.VerificationMeta(
'createdAt',
);
@override
late final i0.GeneratedColumn<DateTime> createdAt =
i0.GeneratedColumn<DateTime>(
'created_at',
aliasedName,
false,
type: i0.DriftSqlType.dateTime,
requiredDuringInsert: true,
);
static const i0.VerificationMeta _loggerMeta = const i0.VerificationMeta(
'logger',
);
@override
late final i0.GeneratedColumn<String> logger = i0.GeneratedColumn<String>(
'logger',
aliasedName,
true,
type: i0.DriftSqlType.string,
requiredDuringInsert: false,
);
static const i0.VerificationMeta _stackMeta = const i0.VerificationMeta(
'stack',
);
@override
late final i0.GeneratedColumn<String> stack = i0.GeneratedColumn<String>(
'stack',
aliasedName,
true,
type: i0.DriftSqlType.string,
requiredDuringInsert: false,
);
@override
List<i0.GeneratedColumn> get $columns => [
id,
message,
details,
level,
createdAt,
logger,
stack,
];
@override
String get aliasedName => _alias ?? actualTableName;
@override
String get actualTableName => $name;
static const String $name = 'logger_messages';
@override
i0.VerificationContext validateIntegrity(
i0.Insertable<i1.LogMessageEntityData> instance, {
bool isInserting = false,
}) {
final context = i0.VerificationContext();
final data = instance.toColumns(true);
if (data.containsKey('id')) {
context.handle(_idMeta, id.isAcceptableOrUnknown(data['id']!, _idMeta));
}
if (data.containsKey('message')) {
context.handle(
_messageMeta,
message.isAcceptableOrUnknown(data['message']!, _messageMeta),
);
} else if (isInserting) {
context.missing(_messageMeta);
}
if (data.containsKey('details')) {
context.handle(
_detailsMeta,
details.isAcceptableOrUnknown(data['details']!, _detailsMeta),
);
}
if (data.containsKey('created_at')) {
context.handle(
_createdAtMeta,
createdAt.isAcceptableOrUnknown(data['created_at']!, _createdAtMeta),
);
} else if (isInserting) {
context.missing(_createdAtMeta);
}
if (data.containsKey('logger')) {
context.handle(
_loggerMeta,
logger.isAcceptableOrUnknown(data['logger']!, _loggerMeta),
);
}
if (data.containsKey('stack')) {
context.handle(
_stackMeta,
stack.isAcceptableOrUnknown(data['stack']!, _stackMeta),
);
}
return context;
}
@override
Set<i0.GeneratedColumn> get $primaryKey => {id};
@override
i1.LogMessageEntityData map(
Map<String, dynamic> data, {
String? tablePrefix,
}) {
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
return i1.LogMessageEntityData(
id: attachedDatabase.typeMapping.read(
i0.DriftSqlType.int,
data['${effectivePrefix}id'],
)!,
message: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}message'],
)!,
details: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}details'],
),
level: i1.$LogMessageEntityTable.$converterlevel.fromSql(
attachedDatabase.typeMapping.read(
i0.DriftSqlType.int,
data['${effectivePrefix}level'],
)!,
),
createdAt: attachedDatabase.typeMapping.read(
i0.DriftSqlType.dateTime,
data['${effectivePrefix}created_at'],
)!,
logger: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}logger'],
),
stack: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}stack'],
),
);
}
@override
$LogMessageEntityTable createAlias(String alias) {
return $LogMessageEntityTable(attachedDatabase, alias);
}
static i0.JsonTypeConverter2<i2.LogLevel, int, int> $converterlevel =
const i0.EnumIndexConverter<i2.LogLevel>(i2.LogLevel.values);
}
class LogMessageEntityData extends i0.DataClass
implements i0.Insertable<i1.LogMessageEntityData> {
final int id;
final String message;
final String? details;
final i2.LogLevel level;
final DateTime createdAt;
final String? logger;
final String? stack;
const LogMessageEntityData({
required this.id,
required this.message,
this.details,
required this.level,
required this.createdAt,
this.logger,
this.stack,
});
@override
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
final map = <String, i0.Expression>{};
map['id'] = i0.Variable<int>(id);
map['message'] = i0.Variable<String>(message);
if (!nullToAbsent || details != null) {
map['details'] = i0.Variable<String>(details);
}
{
map['level'] = i0.Variable<int>(
i1.$LogMessageEntityTable.$converterlevel.toSql(level),
);
}
map['created_at'] = i0.Variable<DateTime>(createdAt);
if (!nullToAbsent || logger != null) {
map['logger'] = i0.Variable<String>(logger);
}
if (!nullToAbsent || stack != null) {
map['stack'] = i0.Variable<String>(stack);
}
return map;
}
factory LogMessageEntityData.fromJson(
Map<String, dynamic> json, {
i0.ValueSerializer? serializer,
}) {
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
return LogMessageEntityData(
id: serializer.fromJson<int>(json['id']),
message: serializer.fromJson<String>(json['message']),
details: serializer.fromJson<String?>(json['details']),
level: i1.$LogMessageEntityTable.$converterlevel.fromJson(
serializer.fromJson<int>(json['level']),
),
createdAt: serializer.fromJson<DateTime>(json['createdAt']),
logger: serializer.fromJson<String?>(json['logger']),
stack: serializer.fromJson<String?>(json['stack']),
);
}
@override
Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
return <String, dynamic>{
'id': serializer.toJson<int>(id),
'message': serializer.toJson<String>(message),
'details': serializer.toJson<String?>(details),
'level': serializer.toJson<int>(
i1.$LogMessageEntityTable.$converterlevel.toJson(level),
),
'createdAt': serializer.toJson<DateTime>(createdAt),
'logger': serializer.toJson<String?>(logger),
'stack': serializer.toJson<String?>(stack),
};
}
i1.LogMessageEntityData copyWith({
int? id,
String? message,
i0.Value<String?> details = const i0.Value.absent(),
i2.LogLevel? level,
DateTime? createdAt,
i0.Value<String?> logger = const i0.Value.absent(),
i0.Value<String?> stack = const i0.Value.absent(),
}) => i1.LogMessageEntityData(
id: id ?? this.id,
message: message ?? this.message,
details: details.present ? details.value : this.details,
level: level ?? this.level,
createdAt: createdAt ?? this.createdAt,
logger: logger.present ? logger.value : this.logger,
stack: stack.present ? stack.value : this.stack,
);
LogMessageEntityData copyWithCompanion(i1.LogMessageEntityCompanion data) {
return LogMessageEntityData(
id: data.id.present ? data.id.value : this.id,
message: data.message.present ? data.message.value : this.message,
details: data.details.present ? data.details.value : this.details,
level: data.level.present ? data.level.value : this.level,
createdAt: data.createdAt.present ? data.createdAt.value : this.createdAt,
logger: data.logger.present ? data.logger.value : this.logger,
stack: data.stack.present ? data.stack.value : this.stack,
);
}
@override
String toString() {
return (StringBuffer('LogMessageEntityData(')
..write('id: $id, ')
..write('message: $message, ')
..write('details: $details, ')
..write('level: $level, ')
..write('createdAt: $createdAt, ')
..write('logger: $logger, ')
..write('stack: $stack')
..write(')'))
.toString();
}
@override
int get hashCode =>
Object.hash(id, message, details, level, createdAt, logger, stack);
@override
bool operator ==(Object other) =>
identical(this, other) ||
(other is i1.LogMessageEntityData &&
other.id == this.id &&
other.message == this.message &&
other.details == this.details &&
other.level == this.level &&
other.createdAt == this.createdAt &&
other.logger == this.logger &&
other.stack == this.stack);
}
class LogMessageEntityCompanion
extends i0.UpdateCompanion<i1.LogMessageEntityData> {
final i0.Value<int> id;
final i0.Value<String> message;
final i0.Value<String?> details;
final i0.Value<i2.LogLevel> level;
final i0.Value<DateTime> createdAt;
final i0.Value<String?> logger;
final i0.Value<String?> stack;
const LogMessageEntityCompanion({
this.id = const i0.Value.absent(),
this.message = const i0.Value.absent(),
this.details = const i0.Value.absent(),
this.level = const i0.Value.absent(),
this.createdAt = const i0.Value.absent(),
this.logger = const i0.Value.absent(),
this.stack = const i0.Value.absent(),
});
LogMessageEntityCompanion.insert({
this.id = const i0.Value.absent(),
required String message,
this.details = const i0.Value.absent(),
required i2.LogLevel level,
required DateTime createdAt,
this.logger = const i0.Value.absent(),
this.stack = const i0.Value.absent(),
}) : message = i0.Value(message),
level = i0.Value(level),
createdAt = i0.Value(createdAt);
static i0.Insertable<i1.LogMessageEntityData> custom({
i0.Expression<int>? id,
i0.Expression<String>? message,
i0.Expression<String>? details,
i0.Expression<int>? level,
i0.Expression<DateTime>? createdAt,
i0.Expression<String>? logger,
i0.Expression<String>? stack,
}) {
return i0.RawValuesInsertable({
if (id != null) 'id': id,
if (message != null) 'message': message,
if (details != null) 'details': details,
if (level != null) 'level': level,
if (createdAt != null) 'created_at': createdAt,
if (logger != null) 'logger': logger,
if (stack != null) 'stack': stack,
});
}
i1.LogMessageEntityCompanion copyWith({
i0.Value<int>? id,
i0.Value<String>? message,
i0.Value<String?>? details,
i0.Value<i2.LogLevel>? level,
i0.Value<DateTime>? createdAt,
i0.Value<String?>? logger,
i0.Value<String?>? stack,
}) {
return i1.LogMessageEntityCompanion(
id: id ?? this.id,
message: message ?? this.message,
details: details ?? this.details,
level: level ?? this.level,
createdAt: createdAt ?? this.createdAt,
logger: logger ?? this.logger,
stack: stack ?? this.stack,
);
}
@override
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
final map = <String, i0.Expression>{};
if (id.present) {
map['id'] = i0.Variable<int>(id.value);
}
if (message.present) {
map['message'] = i0.Variable<String>(message.value);
}
if (details.present) {
map['details'] = i0.Variable<String>(details.value);
}
if (level.present) {
map['level'] = i0.Variable<int>(
i1.$LogMessageEntityTable.$converterlevel.toSql(level.value),
);
}
if (createdAt.present) {
map['created_at'] = i0.Variable<DateTime>(createdAt.value);
}
if (logger.present) {
map['logger'] = i0.Variable<String>(logger.value);
}
if (stack.present) {
map['stack'] = i0.Variable<String>(stack.value);
}
return map;
}
@override
String toString() {
return (StringBuffer('LogMessageEntityCompanion(')
..write('id: $id, ')
..write('message: $message, ')
..write('details: $details, ')
..write('level: $level, ')
..write('createdAt: $createdAt, ')
..write('logger: $logger, ')
..write('stack: $stack')
..write(')'))
.toString();
}
}

File diff suppressed because it is too large

View File

@@ -5,9 +5,7 @@ import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/utils/asset.mixin.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
@TableIndex.sql(
'CREATE INDEX IF NOT EXISTS idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
)
@TableIndex(name: 'idx_remote_asset_owner_checksum', columns: {#ownerId, #checksum})
@TableIndex.sql('''
CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_checksum
ON remote_asset_entity (owner_id, checksum)
@@ -18,7 +16,7 @@ CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_library_checksum
ON remote_asset_entity (owner_id, library_id, checksum)
WHERE (library_id IS NOT NULL);
''')
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_remote_asset_checksum ON remote_asset_entity (checksum)')
@TableIndex(name: 'idx_remote_asset_checksum', columns: {#checksum})
class RemoteAssetEntity extends Table with DriftDefaultsMixin, AssetEntityMixin {
const RemoteAssetEntity();

View File

@@ -628,7 +628,7 @@ typedef $$RemoteAssetEntityTableProcessedTableManager =
>;
i0.Index get idxRemoteAssetOwnerChecksum => i0.Index(
'idx_remote_asset_owner_checksum',
'CREATE INDEX IF NOT EXISTS idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
'CREATE INDEX idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
);
class $RemoteAssetEntityTable extends i3.RemoteAssetEntity
@@ -1641,5 +1641,5 @@ i0.Index get uQRemoteAssetsOwnerLibraryChecksum => i0.Index(
);
i0.Index get idxRemoteAssetChecksum => i0.Index(
'idx_remote_asset_checksum',
'CREATE INDEX IF NOT EXISTS idx_remote_asset_checksum ON remote_asset_entity (checksum)',
'CREATE INDEX idx_remote_asset_checksum ON remote_asset_entity (checksum)',
);

View File

@@ -21,7 +21,7 @@ import 'package:immich_mobile/infrastructure/entities/stack.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.steps.dart';
import 'package:isar/isar.dart' hide Index;
import 'package:isar/isar.dart';
import 'db.repository.drift.dart';
@@ -66,7 +66,7 @@ class Drift extends $Drift implements IDatabaseRepository {
: super(executor ?? driftDatabase(name: 'immich', native: const DriftNativeOptions(shareAcrossIsolates: true)));
@override
int get schemaVersion => 7;
int get schemaVersion => 6;
@override
MigrationStrategy get migration => MigrationStrategy(
@@ -106,18 +106,12 @@ class Drift extends $Drift implements IDatabaseRepository {
from5To6: (m, v6) async {
// Drops the (checksum, ownerId) and adds it back as (ownerId, checksum)
await customStatement('DROP INDEX IF EXISTS UQ_remote_asset_owner_checksum');
await m.drop(v6.idxRemoteAssetOwnerChecksum);
await m.create(v6.idxRemoteAssetOwnerChecksum);
// Adds libraryId to remote_asset_entity
await m.addColumn(v6.remoteAssetEntity, v6.remoteAssetEntity.libraryId);
await m.drop(v6.uQRemoteAssetsOwnerChecksum);
await m.create(v6.uQRemoteAssetsOwnerChecksum);
await m.drop(v6.uQRemoteAssetsOwnerLibraryChecksum);
await m.create(v6.uQRemoteAssetsOwnerLibraryChecksum);
},
from6To7: (m, v7) async {
await m.createIndex(v7.idxLatLng);
},
),
);
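
The from6To7 step in this hunk creates a single index during the 6-to-7 upgrade; with drift's plain onUpgrade callback the equivalent looks roughly like this (a sketch; idxLatLng stands in for the generated index object):

import 'package:drift/drift.dart';

// Creates the index when upgrading from any schema version below 7.
MigrationStrategy migrationFor(Index idxLatLng) {
  return MigrationStrategy(
    onUpgrade: (Migrator m, int from, int to) async {
      if (from < 7) {
        await m.createIndex(idxLatLng);
      }
    },
  );
}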

View File

@@ -98,7 +98,6 @@ abstract class $Drift extends i0.GeneratedDatabase {
memoryAssetEntity,
personEntity,
assetFaceEntity,
i9.idxLatLng,
];
@override
i0.StreamQueryUpdateRules

View File

@@ -2705,357 +2705,12 @@ i1.GeneratedColumn<String> _column_86(String aliasedName) =>
true,
type: i1.DriftSqlType.string,
);
final class Schema7 extends i0.VersionedSchema {
Schema7({required super.database}) : super(version: 7);
@override
late final List<i1.DatabaseSchemaEntity> entities = [
userEntity,
remoteAssetEntity,
stackEntity,
localAssetEntity,
localAlbumEntity,
localAlbumAssetEntity,
idxLocalAssetChecksum,
idxRemoteAssetOwnerChecksum,
uQRemoteAssetsOwnerChecksum,
uQRemoteAssetsOwnerLibraryChecksum,
idxRemoteAssetChecksum,
userMetadataEntity,
partnerEntity,
remoteExifEntity,
remoteAlbumEntity,
remoteAlbumAssetEntity,
remoteAlbumUserEntity,
memoryEntity,
memoryAssetEntity,
personEntity,
assetFaceEntity,
idxLatLng,
];
late final Shape16 userEntity = Shape16(
source: i0.VersionedTable(
entityName: 'user_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_2,
_column_3,
_column_84,
_column_85,
_column_5,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape17 remoteAssetEntity = Shape17(
source: i0.VersionedTable(
entityName: 'remote_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_1,
_column_8,
_column_9,
_column_5,
_column_10,
_column_11,
_column_12,
_column_0,
_column_13,
_column_14,
_column_15,
_column_16,
_column_17,
_column_18,
_column_19,
_column_20,
_column_21,
_column_86,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape3 stackEntity = Shape3(
source: i0.VersionedTable(
entityName: 'stack_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [_column_0, _column_9, _column_5, _column_15, _column_75],
attachedDatabase: database,
),
alias: null,
);
late final Shape2 localAssetEntity = Shape2(
source: i0.VersionedTable(
entityName: 'local_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_1,
_column_8,
_column_9,
_column_5,
_column_10,
_column_11,
_column_12,
_column_0,
_column_22,
_column_14,
_column_23,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape6 localAlbumEntity = Shape6(
source: i0.VersionedTable(
entityName: 'local_album_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_5,
_column_31,
_column_32,
_column_33,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape7 localAlbumAssetEntity = Shape7(
source: i0.VersionedTable(
entityName: 'local_album_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
columns: [_column_34, _column_35],
attachedDatabase: database,
),
alias: null,
);
final i1.Index idxLocalAssetChecksum = i1.Index(
'idx_local_asset_checksum',
'CREATE INDEX IF NOT EXISTS idx_local_asset_checksum ON local_asset_entity (checksum)',
);
final i1.Index idxRemoteAssetOwnerChecksum = i1.Index(
'idx_remote_asset_owner_checksum',
'CREATE INDEX IF NOT EXISTS idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
);
final i1.Index uQRemoteAssetsOwnerChecksum = i1.Index(
'UQ_remote_assets_owner_checksum',
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_checksum ON remote_asset_entity (owner_id, checksum) WHERE(library_id IS NULL)',
);
final i1.Index uQRemoteAssetsOwnerLibraryChecksum = i1.Index(
'UQ_remote_assets_owner_library_checksum',
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_library_checksum ON remote_asset_entity (owner_id, library_id, checksum) WHERE(library_id IS NOT NULL)',
);
final i1.Index idxRemoteAssetChecksum = i1.Index(
'idx_remote_asset_checksum',
'CREATE INDEX IF NOT EXISTS idx_remote_asset_checksum ON remote_asset_entity (checksum)',
);
late final Shape4 userMetadataEntity = Shape4(
source: i0.VersionedTable(
entityName: 'user_metadata_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(user_id, "key")'],
columns: [_column_25, _column_26, _column_27],
attachedDatabase: database,
),
alias: null,
);
late final Shape5 partnerEntity = Shape5(
source: i0.VersionedTable(
entityName: 'partner_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(shared_by_id, shared_with_id)'],
columns: [_column_28, _column_29, _column_30],
attachedDatabase: database,
),
alias: null,
);
late final Shape8 remoteExifEntity = Shape8(
source: i0.VersionedTable(
entityName: 'remote_exif_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id)'],
columns: [
_column_36,
_column_37,
_column_38,
_column_39,
_column_40,
_column_41,
_column_11,
_column_10,
_column_42,
_column_43,
_column_44,
_column_45,
_column_46,
_column_47,
_column_48,
_column_49,
_column_50,
_column_51,
_column_52,
_column_53,
_column_54,
_column_55,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape9 remoteAlbumEntity = Shape9(
source: i0.VersionedTable(
entityName: 'remote_album_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_56,
_column_9,
_column_5,
_column_15,
_column_57,
_column_58,
_column_59,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape7 remoteAlbumAssetEntity = Shape7(
source: i0.VersionedTable(
entityName: 'remote_album_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
columns: [_column_36, _column_60],
attachedDatabase: database,
),
alias: null,
);
late final Shape10 remoteAlbumUserEntity = Shape10(
source: i0.VersionedTable(
entityName: 'remote_album_user_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(album_id, user_id)'],
columns: [_column_60, _column_25, _column_61],
attachedDatabase: database,
),
alias: null,
);
late final Shape11 memoryEntity = Shape11(
source: i0.VersionedTable(
entityName: 'memory_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_9,
_column_5,
_column_18,
_column_15,
_column_8,
_column_62,
_column_63,
_column_64,
_column_65,
_column_66,
_column_67,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape12 memoryAssetEntity = Shape12(
source: i0.VersionedTable(
entityName: 'memory_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id, memory_id)'],
columns: [_column_36, _column_68],
attachedDatabase: database,
),
alias: null,
);
late final Shape14 personEntity = Shape14(
source: i0.VersionedTable(
entityName: 'person_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_9,
_column_5,
_column_15,
_column_1,
_column_69,
_column_71,
_column_72,
_column_73,
_column_74,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape15 assetFaceEntity = Shape15(
source: i0.VersionedTable(
entityName: 'asset_face_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_36,
_column_76,
_column_77,
_column_78,
_column_79,
_column_80,
_column_81,
_column_82,
_column_83,
],
attachedDatabase: database,
),
alias: null,
);
final i1.Index idxLatLng = i1.Index(
'idx_lat_lng',
'CREATE INDEX IF NOT EXISTS idx_lat_lng ON remote_exif_entity (latitude, longitude)',
);
}
i0.MigrationStepWithVersion migrationSteps({
required Future<void> Function(i1.Migrator m, Schema2 schema) from1To2,
required Future<void> Function(i1.Migrator m, Schema3 schema) from2To3,
required Future<void> Function(i1.Migrator m, Schema4 schema) from3To4,
required Future<void> Function(i1.Migrator m, Schema5 schema) from4To5,
required Future<void> Function(i1.Migrator m, Schema6 schema) from5To6,
required Future<void> Function(i1.Migrator m, Schema7 schema) from6To7,
}) {
return (currentVersion, database) async {
switch (currentVersion) {
@@ -3084,11 +2739,6 @@ i0.MigrationStepWithVersion migrationSteps({
final migrator = i1.Migrator(database, schema);
await from5To6(migrator, schema);
return 6;
case 6:
final schema = Schema7(database: database);
final migrator = i1.Migrator(database, schema);
await from6To7(migrator, schema);
return 7;
default:
throw ArgumentError.value('Unknown migration from $currentVersion');
}
@@ -3101,7 +2751,6 @@ i1.OnUpgrade stepByStep({
required Future<void> Function(i1.Migrator m, Schema4 schema) from3To4,
required Future<void> Function(i1.Migrator m, Schema5 schema) from4To5,
required Future<void> Function(i1.Migrator m, Schema6 schema) from5To6,
required Future<void> Function(i1.Migrator m, Schema7 schema) from6To7,
}) => i0.VersionedSchema.stepByStepHelper(
step: migrationSteps(
from1To2: from1To2,
@@ -3109,6 +2758,5 @@ i1.OnUpgrade stepByStep({
from3To4: from3To4,
from4To5: from4To5,
from5To6: from5To6,
from6To7: from6To7,
),
);

View File

@@ -8,7 +8,7 @@ import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/utils/database.utils.dart';
import 'package:platform/platform.dart';
enum SortLocalAlbumsBy { id, backupSelection, isIosSharedAlbum, name, assetCount, newestAsset }
enum SortLocalAlbumsBy { id, backupSelection, isIosSharedAlbum, name, assetCount }
class DriftLocalAlbumRepository extends DriftDatabaseRepository {
final Drift _db;
@@ -40,7 +40,6 @@ class DriftLocalAlbumRepository extends DriftDatabaseRepository {
SortLocalAlbumsBy.isIosSharedAlbum => OrderingTerm.asc(_db.localAlbumEntity.isIosSharedAlbum),
SortLocalAlbumsBy.name => OrderingTerm.asc(_db.localAlbumEntity.name),
SortLocalAlbumsBy.assetCount => OrderingTerm.desc(assetCount),
SortLocalAlbumsBy.newestAsset => OrderingTerm.desc(_db.localAlbumEntity.updatedAt),
});
}
query.orderBy(orderings);
@@ -320,7 +319,7 @@ class DriftLocalAlbumRepository extends DriftDatabaseRepository {
innerJoin(_db.localAssetEntity, _db.localAlbumAssetEntity.assetId.equalsExp(_db.localAssetEntity.id)),
])
..where(_db.localAlbumAssetEntity.albumId.equals(albumId))
..orderBy([OrderingTerm.desc(_db.localAssetEntity.createdAt)])
..orderBy([OrderingTerm.asc(_db.localAssetEntity.id)])
..limit(1);
final results = await query.map((row) => row.readTable(_db.localAssetEntity).toDto()).get();

Some files were not shown because too many files have changed in this diff