Compare commits

..

1 Commit

Author: Min Idzelis
SHA1: 65f7b3a86d
Message: feat: webdav
Date: 2025-06-07 04:18:50 +00:00
1731 changed files with 35923 additions and 119054 deletions

.devcontainer/.gitignore (vendored, new file, 2 lines changed)

@@ -0,0 +1,2 @@
.env
library

.devcontainer/Dockerfile (new file, 16 lines changed)

@@ -0,0 +1,16 @@
ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:fb211a0ea31a6177507498c084682aae8c9c31ca27668ea122246aa16a4723a0
FROM ${BASEIMAGE}
# Flutter SDK
# https://flutter.dev/docs/development/tools/sdk/releases?tab=linux
ENV FLUTTER_CHANNEL="stable"
ENV FLUTTER_VERSION="3.29.3"
ENV FLUTTER_HOME=/flutter
ENV PATH=${PATH}:${FLUTTER_HOME}/bin
# Flutter SDK
RUN mkdir -p ${FLUTTER_HOME} \
&& curl -C - --output flutter.tar.xz https://storage.googleapis.com/flutter_infra_release/releases/${FLUTTER_CHANNEL}/linux/flutter_linux_${FLUTTER_VERSION}-${FLUTTER_CHANNEL}.tar.xz \
&& tar -xf flutter.tar.xz --strip-components=1 -C ${FLUTTER_HOME} \
&& rm flutter.tar.xz \
&& chown -R 1000:1000 ${FLUTTER_HOME}


@@ -1,67 +1,26 @@
{
"name": "Immich - Backend, Frontend and ML",
"service": "immich-server",
"runServices": [
"immich-server",
"redis",
"database",
"immich-machine-learning"
],
"name": "Immich",
"service": "immich-devcontainer",
"dockerComposeFile": [
"../docker/docker-compose.dev.yml",
"./server/container-compose-overrides.yml"
"docker-compose.yml",
"../docker/docker-compose.dev.yml"
],
"customizations": {
"vscode": {
"extensions": [
"Dart-Code.dart-code",
"Dart-Code.flutter",
"dbaeumer.vscode-eslint",
"dcmdev.dcm-vscode-extension",
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"ms-vscode-remote.remote-containers",
"foxundermoon.shell-format",
"timonwong.shellcheck",
"rvest.vs-code-prettier-eslint",
"bluebrown.yamlfmt",
"vkrishna04.cspell-sync",
"vitest.explorer",
"ms-playwright.playwright",
"ms-azuretools.vscode-docker"
"svelte.svelte-vscode"
]
}
},
"forwardPorts": [3000, 9231, 9230, 2283],
"portsAttributes": {
"3000": {
"label": "Immich - Frontend HTTP",
"description": "The frontend of the Immich project",
"onAutoForward": "openBrowserOnce"
},
"2283": {
"label": "Immich - API Server - HTTP",
"description": "The API server of the Immich project"
},
"9231": {
"label": "Immich - API Server - DEBUG",
"description": "The API server of the Immich project"
},
"9230": {
"label": "Immich - Workers - DEBUG",
"description": "The workers of the Immich project"
}
},
"forwardPorts": [],
"initializeCommand": "bash .devcontainer/scripts/initializeCommand.sh",
"onCreateCommand": "bash .devcontainer/scripts/onCreateCommand.sh",
"overrideCommand": true,
"workspaceFolder": "/workspaces/immich",
"remoteUser": "node",
"userEnvProbe": "loginInteractiveShell",
"remoteEnv": {
// The location where your uploaded files are stored
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./library}",
// Connection secret for postgres. You should change it to a random password
// Please use only the characters `A-Za-z0-9`, without special characters or spaces
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
// The database username
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
// The database name
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
}
"workspaceFolder": "/immich",
"remoteUser": "node"
}


@@ -0,0 +1,8 @@
services:
immich-devcontainer:
build:
dockerfile: Dockerfile
extra_hosts:
- 'host.docker.internal:host-gateway'
volumes:
- ..:/immich:cached


@@ -1,34 +0,0 @@
services:
immich-server:
build:
target: dev-container-mobile
environment:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override # bind mount host to /workspaces/immich
- ..:/workspaces/immich
- cli_node_modules:/workspaces/immich/cli/node_modules
- e2e_node_modules:/workspaces/immich/e2e/node_modules
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro
database:
volumes:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
volumes:
# Node modules for each service to avoid conflicts and ensure consistent dependencies
cli_node_modules:
e2e_node_modules:
open_api_node_modules:
server_node_modules:
web_node_modules:
# UPLOAD_LOCATION must be set to an absolute path or vol-upload
vol-upload:
# DB_DATA_LOCATION must be set to an absolute path or vol-database
vol-database:


@@ -1,52 +0,0 @@
{
"name": "Immich - Mobile",
"service": "immich-server",
"runServices": [
"immich-server",
"redis",
"database",
"immich-machine-learning"
],
"dockerComposeFile": [
"../../docker/docker-compose.dev.yml",
"./container-compose-overrides.yml"
],
"customizations": {
"vscode": {
"extensions": [
"Dart-Code.dart-code",
"Dart-Code.flutter",
"dcmdev.dcm-vscode-extension",
"esbenp.prettier-vscode",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"ms-vscode-remote.remote-containers",
"foxundermoon.shell-format",
"timonwong.shellcheck",
"rvest.vs-code-prettier-eslint",
"bluebrown.yamlfmt",
"vkrishna04.cspell-sync",
"vitest.explorer",
"ms-playwright.playwright",
"ms-azuretools.vscode-docker"
]
}
},
"forwardPorts": [],
"overrideCommand": true,
"workspaceFolder": "/workspaces/immich",
"remoteUser": "node",
"userEnvProbe": "loginInteractiveShell",
"remoteEnv": {
// The location where your uploaded files are stored
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./Library}",
// Connection secret for postgres. You should change it to a random password
// Please use only the characters `A-Za-z0-9`, without special characters or spaces
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
// The database username
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
// The database name
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
}
}


@@ -0,0 +1,6 @@
#!/bin/bash
# If .env file does not exist, create it by copying example.env from the docker folder
if [ ! -f ".devcontainer/.env" ]; then
cp docker/example.env .devcontainer/.env
fi


@@ -0,0 +1,25 @@
#!/bin/bash
# Enable multiarch for arm64 if necessary
if [ "$(dpkg --print-architecture)" = "arm64" ]; then
sudo dpkg --add-architecture amd64 && \
sudo apt-get update && \
sudo apt-get install -y --no-install-recommends \
qemu-user-static \
libc6:amd64 \
libstdc++6:amd64 \
libgcc1:amd64
fi
# Install DCM
wget -qO- https://dcm.dev/pgp-key.public | sudo gpg --dearmor -o /usr/share/keyrings/dcm.gpg
sudo echo 'deb [signed-by=/usr/share/keyrings/dcm.gpg arch=amd64] https://dcm.dev/debian stable main' | sudo tee /etc/apt/sources.list.d/dart_stable.list
sudo apt-get update
sudo apt-get install dcm
dart --disable-analytics
# Install immich
cd /immich || exit
make install-all


@@ -1,80 +0,0 @@
#!/bin/bash
export IMMICH_PORT="${DEV_SERVER_PORT:-2283}"
export DEV_PORT="${DEV_PORT:-3000}"
# search for immich directory inside workspace.
# /workspaces/immich is the bind mount, but other directories can be mounted if running
# Devcontainer: Clone [repository|pull request] in container volume
WORKSPACES_DIR="/workspaces"
IMMICH_DIR="$WORKSPACES_DIR/immich"
IMMICH_DEVCONTAINER_LOG="$HOME/immich-devcontainer.log"
log() {
# Display command on console, log with timestamp to file
echo "$*"
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" >>"$IMMICH_DEVCONTAINER_LOG"
}
run_cmd() {
# Ensure log directory exists
mkdir -p "$(dirname "$IMMICH_DEVCONTAINER_LOG")"
log "$@"
# Execute command: display normally on console, log with timestamps to file
"$@" 2>&1 | tee >(while IFS= read -r line; do
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $line" >>"$IMMICH_DEVCONTAINER_LOG"
done)
# Preserve exit status
return "${PIPESTATUS[0]}"
}
# Find directories excluding /workspaces/immich
mapfile -t other_dirs < <(find "$WORKSPACES_DIR" -mindepth 1 -maxdepth 1 -type d ! -path "$IMMICH_DIR" ! -name ".*")
if [ ${#other_dirs[@]} -gt 1 ]; then
log "Error: More than one directory found in $WORKSPACES_DIR other than $IMMICH_DIR."
exit 1
elif [ ${#other_dirs[@]} -eq 1 ]; then
export IMMICH_WORKSPACE="${other_dirs[0]}"
else
export IMMICH_WORKSPACE="$IMMICH_DIR"
fi
log "Found immich workspace in $IMMICH_WORKSPACE"
log ""
fix_permissions() {
log "Fixing permissions for ${IMMICH_WORKSPACE}"
run_cmd sudo find "${IMMICH_WORKSPACE}/server/upload" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres/*" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres" -exec chown node {} +
# Change ownership for directories that exist
for dir in "${IMMICH_WORKSPACE}/.vscode" \
"${IMMICH_WORKSPACE}/cli/node_modules" \
"${IMMICH_WORKSPACE}/e2e/node_modules" \
"${IMMICH_WORKSPACE}/open-api/typescript-sdk/node_modules" \
"${IMMICH_WORKSPACE}/server/node_modules" \
"${IMMICH_WORKSPACE}/server/dist" \
"${IMMICH_WORKSPACE}/web/node_modules" \
"${IMMICH_WORKSPACE}/web/dist"; do
if [ -d "$dir" ]; then
run_cmd sudo chown node -R "$dir"
fi
done
log ""
}
install_dependencies() {
log "Installing dependencies"
(
cd "${IMMICH_WORKSPACE}" || exit 1
export CI=1 FROZEN=1 OFFLINE=1
run_cmd make setup-web-dev setup-server-dev
)
log ""
}


@@ -1,49 +0,0 @@
services:
immich-server:
build:
target: dev-container-server
env_file: !reset []
hostname: immich-dev
environment:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override
- ..:/workspaces/immich
- cli_node_modules:/workspaces/immich/cli/node_modules
- e2e_node_modules:/workspaces/immich/e2e/node_modules
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/usr/src/app/upload
- ${UPLOAD_LOCATION:-upload2-devcontainer-volume}${UPLOAD_LOCATION:+/photos/upload}:/usr/src/app/upload/upload
- /etc/localtime:/etc/localtime:ro
immich-web:
env_file: !reset []
immich-machine-learning:
env_file: !reset []
database:
env_file: !reset []
environment: !override
POSTGRES_PASSWORD: ${DB_PASSWORD-postgres}
POSTGRES_USER: ${DB_USERNAME-postgres}
POSTGRES_DB: ${DB_DATABASE_NAME-immich}
POSTGRES_INITDB_ARGS: '--data-checksums'
POSTGRES_HOST_AUTH_METHOD: md5
volumes:
- ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data
redis:
env_file: !reset []
volumes:
# Node modules for each service to avoid conflicts and ensure consistent dependencies
cli_node_modules:
e2e_node_modules:
open_api_node_modules:
server_node_modules:
web_node_modules:
upload1-devcontainer-volume:
upload2-devcontainer-volume:
postgres-devcontainer-volume:


@@ -1,17 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Starting Nest API Server"
log ""
cd "${IMMICH_WORKSPACE}/server" || (
log "Immich workspace not found"
exit 1
)
while true; do
run_cmd node ./node_modules/.bin/nest start --debug "0.0.0.0:9230" --watch
log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
sleep 3
done


@@ -1,22 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Starting Immich Web Frontend"
log ""
cd "${IMMICH_WORKSPACE}/web" || (
log "Immich Workspace not found"
exit 1
)
until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_PORT}/api/server/config"; do
log "Waiting for api server..."
sleep 1
done
while true; do
run_cmd node ./node_modules/.bin/vite dev --host 0.0.0.0 --port "${DEV_PORT}"
log "Web crashed with exit code $?. Respawning in 3s ..."
sleep 3
done


@@ -1,20 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Setting up Immich dev container..."
fix_permissions
log "Installing npm dependencies (node_modules)..."
install_dependencies
log "Setup complete, please wait while backend and frontend services automatically start"
log
log "If necessary, the services may be manually started using"
log
log "$ /immich-devcontainer/container-start-backend.sh"
log "$ /immich-devcontainer/container-start-frontend.sh"
log
log "From different terminal windows, as these scripts automatically restart the server"
log "on error, and will continuously run in a loop"


@@ -1,41 +1,33 @@
.vscode/
.github/
.git/
.env*
*.log
*.tmp
*.temp
**/Dockerfile
**/node_modules/
**/.pnpm-store/
**/dist/
**/coverage/
**/build/
design/
docker/
!docker/scripts
docs/
!docs/package.json
!docs/package-lock.json
e2e/
!e2e/package.json
!e2e/package-lock.json
fastlane/
machine-learning/
misc/
mobile/
open-api/typescript-sdk/build/
!open-api/typescript-sdk/package.json
!open-api/typescript-sdk/package-lock.json
cli/coverage/
cli/dist/
cli/node_modules/
open-api/typescript-sdk/build/
open-api/typescript-sdk/node_modules/
server/coverage/
server/node_modules/
server/upload/
server/src/queries
server/dist/
server/www/
web/node_modules/
web/coverage/
web/.svelte-kit
web/build/
web/.env

.gitattributes (vendored, 6 lines changed)

@@ -12,12 +12,6 @@ mobile/lib/**/*.drift.dart linguist-generated=true
mobile/drift_schemas/main/drift_schema_*.json -diff -merge
mobile/drift_schemas/main/drift_schema_*.json linguist-generated=true
mobile/lib/infrastructure/repositories/db.repository.steps.dart -diff -merge
mobile/lib/infrastructure/repositories/db.repository.steps.dart linguist-generated=true
mobile/test/drift/main/generated/** -diff -merge
mobile/test/drift/main/generated/** linguist-generated=true
open-api/typescript-sdk/fetch-client.ts -diff -merge
open-api/typescript-sdk/fetch-client.ts linguist-generated=true

.github/.nvmrc (vendored, 2 lines changed)

@@ -1 +1 @@
22.17.1
22.16.0


@@ -1,4 +0,0 @@
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

.github/package-lock.json (generated, vendored, 6 lines changed)

@@ -9,9 +9,9 @@
}
},
"node_modules/prettier": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz",
"integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==",
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz",
"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==",
"dev": true,
"license": "MIT",
"bin": {


@@ -58,7 +58,7 @@ jobs:
contents: read
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
runs-on: mich
runs-on: macos-14
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -66,40 +66,24 @@ jobs:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false
- name: Create the Keystore
env:
KEY_JKS: ${{ secrets.KEY_JKS }}
working-directory: ./mobile
run: printf "%s" $KEY_JKS | base64 -d > android/key.jks
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
distribution: 'zulu'
java-version: '17'
- name: Restore Gradle Cache
id: cache-gradle-restore
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: build-mobile-gradle-${{ runner.os }}-main
cache: 'gradle'
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
cache: true
- name: Setup Android SDK
uses: android-actions/setup-android@9fc6c4e9069bf8d3d10b2204b1fb8f6ef7065407 # v3.2.2
with:
packages: ''
- name: Create the Keystore
env:
KEY_JKS: ${{ secrets.KEY_JKS }}
working-directory: ./mobile
run: echo $KEY_JKS | base64 -d > android/key.jks
- name: Get Packages
working-directory: ./mobile
@@ -119,30 +103,12 @@ jobs:
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
run: |
if [[ $IS_MAIN == 'true' ]]; then
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
else
flutter build apk --debug --split-per-abi --target-platform android-arm64
fi
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
- name: Publish Android Artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk
- name: Save Gradle Cache
id: cache-gradle-save
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4
if: github.ref == 'refs/heads/main'
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}


@@ -38,9 +38,6 @@ jobs:
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Prepare SDK
run: npm ci --prefix ../open-api/typescript-sdk/
- name: Build SDK
@@ -70,7 +67,7 @@ jobs:
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
@@ -99,7 +96,7 @@ jobs:
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
- name: Build and push image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64


@@ -50,7 +50,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/autobuild@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
category: '/language:${{matrix.language}}'


@@ -131,7 +131,7 @@ jobs:
tag-suffix: '-rocm'
platforms: linux/amd64
runner-mapping: '{"linux/amd64": "mich"}'
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
permissions:
contents: read
actions: read
@@ -154,7 +154,7 @@ jobs:
name: Build and Push Server
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
permissions:
contents: read
actions: read
@@ -177,9 +177,13 @@ jobs:
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}
- name: Any jobs failed?
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1
- name: All jobs passed or skipped
if: ${{ !(contains(needs.*.result, 'failure')) }}
# zizmor: ignore[template-injection]
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
success-check-ml:
name: Docker Build & Push ML Success
@@ -188,6 +192,10 @@ jobs:
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}
- name: Any jobs failed?
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1
- name: All jobs passed or skipped
if: ${{ !(contains(needs.*.result, 'failure')) }}
# zizmor: ignore[template-injection]
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"


@@ -57,8 +57,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './docs/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run npm install
run: npm ci


@@ -32,8 +32,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Fix formatting
run: make install-all && make format-all


@@ -1,13 +0,0 @@
name: Org Checks
on:
pull_request_review:
pull_request:
jobs:
check-approvals:
name: Check for Team/Admin Review
uses: immich-app/devtools/.github/workflows/required-approval.yml@main
permissions:
pull-requests: read
contents: read


@@ -14,7 +14,7 @@ jobs:
pull-requests: write
steps:
- name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5.5.1
uses: mheap/github-action-required-labels@fb29a14a076b0f74099f6198f77750e8fc236016 # v5.5.0
with:
mode: exactly
count: 1


@@ -100,7 +100,7 @@ jobs:
name: release-apk-signed
- name: Create draft release
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2
with:
draft: true
tag_name: ${{ env.IMMICH_VERSION }}


@@ -25,8 +25,6 @@ jobs:
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install deps
run: npm ci
- name: Build


@@ -42,9 +42,6 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./mobile
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -52,29 +49,26 @@ jobs:
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
- name: Install dependencies
run: dart pub get
- name: Install DCM
uses: CQLabs/setup-dcm@8697ae0790c0852e964a6ef1d768d62a6675481a # v2.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
version: auto
working-directory: ./mobile
working-directory: ./mobile
- name: Generate translation file
run: make translation
working-directory: ./mobile
- name: Run Build Runner
run: make build
working-directory: ./mobile
- name: Generate platform API
run: make pigeon
working-directory: ./mobile
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
@@ -96,17 +90,15 @@ jobs:
- name: Run dart analyze
run: dart analyze --fatal-infos
working-directory: ./mobile
- name: Run dart format
# Ignore generated files manually until https://github.com/dart-lang/dart_style/issues/864 is resolved
run: dart format --set-exit-if-changed $(find lib -name '*.dart' -not \( -name 'generated_plugin_registrant.dart' -o -name '*.g.dart' -o -name '*.drift.dart' \))
run: dart format lib/ --set-exit-if-changed
working-directory: ./mobile
- name: Run dart custom_lint
run: dart run custom_lint
# TODO: Use https://github.com/CQLabs/dcm-action
- name: Run DCM
run: dcm analyze lib --fatal-style --fatal-warnings
working-directory: ./mobile
zizmor:
name: zizmor
@@ -130,7 +122,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
sarif_file: results.sarif
category: zizmor


@@ -84,8 +84,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run npm install
run: npm ci
@@ -103,7 +101,7 @@ jobs:
if: ${{ !cancelled() }}
- name: Run small tests & coverage
run: npm test
run: npm run test:cov
if: ${{ !cancelled() }}
cli-unit-tests:
@@ -127,8 +125,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Setup typescript-sdk
run: npm ci && npm run build
@@ -150,7 +146,7 @@ jobs:
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test
run: npm run test:cov
if: ${{ !cancelled() }}
cli-unit-tests-win:
@@ -174,8 +170,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Setup typescript-sdk
run: npm ci && npm run build
@@ -190,7 +184,7 @@ jobs:
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test
run: npm run test:cov
if: ${{ !cancelled() }}
web-lint:
@@ -214,8 +208,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: npm ci && npm run build
@@ -257,8 +249,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: npm ci && npm run build
@@ -272,7 +262,7 @@ jobs:
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test
run: npm run test:cov
if: ${{ !cancelled() }}
i18n-tests:
@@ -292,8 +282,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install dependencies
run: npm --prefix=web ci
@@ -338,8 +326,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: npm ci && npm run build
@@ -383,8 +369,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run npm install
run: npm ci
@@ -418,8 +402,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: npm ci && npm run build
@@ -468,8 +450,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run setup typescript-sdk
run: npm ci && npm run build
@@ -481,7 +461,7 @@ jobs:
if: ${{ !cancelled() }}
- name: Install Playwright Browsers
run: npx playwright install chromium --only-shell
run: npx playwright install --with-deps chromium
if: ${{ !cancelled() }}
- name: Docker build
@@ -499,9 +479,13 @@ jobs:
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}
- name: Any jobs failed?
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1
- name: All jobs passed or skipped
if: ${{ !(contains(needs.*.result, 'failure')) }}
# zizmor: ignore[template-injection]
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
mobile-unit-tests:
name: Unit Test Mobile
@@ -516,7 +500,7 @@ jobs:
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -588,8 +572,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run npm install
run: npm ci
@@ -609,7 +591,7 @@ jobs:
persist-credentials: false
- name: Run ShellCheck
uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
uses: ludeeus/action-shellcheck@master
with:
ignore_paths: >-
**/open-api/**
@@ -631,8 +613,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install server dependencies
run: npm --prefix=server ci
@@ -668,7 +648,7 @@ jobs:
contents: read
services:
postgres:
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:1f5583fe3397210a0fbc7f11b0cec18bacc4a99e3e8ea0548e9bd6bcf26ec37a
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.1
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
@@ -694,8 +674,6 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install server dependencies
run: npm ci
@@ -748,7 +726,6 @@ jobs:
run: |
echo "ERROR: Generated SQL files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
git diff
exit 1
# mobile-integration-tests:


@@ -52,6 +52,10 @@ jobs:
permissions: {}
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}
- name: Any jobs failed?
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1
- name: All jobs passed or skipped
if: ${{ !(contains(needs.*.result, 'failure')) }}
# zizmor: ignore[template-injection]
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"

.gitignore (vendored, 1 line changed)

@@ -24,4 +24,3 @@ mobile/android/fastlane/report.xml
mobile/ios/fastlane/report.xml
vite.config.js.timestamp-*
.pnpm-store

.vscode/tasks.json (vendored, 72 lines changed)

@@ -1,72 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Fix Permissions, Install Dependencies",
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start.sh ] && /immich-devcontainer/container-start.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich API Server (Nest)",
"dependsOn": ["Fix Permissions, Install Dependencies"],
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start-backend.sh ] && /immich-devcontainer/container-start-backend.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich Web Server (Vite)",
"dependsOn": ["Fix Permissions, Install Dependencies"],
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start-frontend.sh ] && /immich-devcontainer/container-start-frontend.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich Server and Web",
"dependsOn": ["Immich Web Server (Vite)", "Immich API Server (Nest)"],
"runOptions": {
"runOn": "folderOpen"
},
"problemMatcher": []
}
]
}


@@ -1,33 +1,27 @@
dev:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans || make dev-down
dev-down:
docker compose -f ./docker/docker-compose.dev.yml down --remove-orphans
dev-update:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
dev-scale:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
.PHONY: e2e
e2e:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
e2e-update:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
e2e-down:
docker compose -f ./e2e/docker-compose.yml down --remove-orphans
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
prod:
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
prod-down:
docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans
prod-scale:
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
.PHONY: open-api
open-api:
@@ -54,8 +48,6 @@ audit-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
install-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
ci-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) ci
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
@@ -90,7 +82,6 @@ test-medium-dev:
build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
ci-all: $(foreach M,$(filter-out .github,$(MODULES)),ci-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
@@ -99,12 +90,9 @@ hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ;
clean:
find . -name "node_modules" -type d -prune -exec rm -rf {} +
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
find . -name "dist" -type d -prune -exec rm -rf '{}' +
find . -name "build" -type d -prune -exec rm -rf '{}' +
find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml rm -v -f || true
setup-server-dev: install-server
setup-web-dev: install-sdk build-sdk install-web
docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
docker compose -f ./e2e/docker-compose.yml rm -v -f || true


@@ -1 +1 @@
22.17.1
22.16.0


@@ -1,2 +0,0 @@
#!/usr/bin/env node
import '../dist/index.js';

cli/package-lock.json (generated, 813 lines changed)

File diff suppressed because it is too large.


@@ -1,11 +1,11 @@
{
"name": "@immich/cli",
"version": "2.2.73",
"version": "2.2.68",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
"bin": {
"immich": "./bin/immich"
"immich": "dist/index.js"
},
"license": "GNU Affero General Public License version 3",
"keywords": [
@@ -21,7 +21,7 @@
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.16.4",
"@types/node": "^22.15.21",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",
@@ -29,14 +29,14 @@
"eslint": "^9.14.0",
"eslint-config-prettier": "^10.0.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^59.0.0",
"eslint-plugin-unicorn": "^57.0.0",
"globals": "^16.0.0",
"mock-fs": "^5.2.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"typescript": "^5.3.3",
"typescript-eslint": "^8.28.0",
"vite": "^7.0.0",
"vite": "^6.0.0",
"vite-tsconfig-paths": "^5.0.0",
"vitest": "^3.0.0",
"vitest-fetch-mock": "^0.4.0",
@@ -69,6 +69,6 @@
"micromatch": "^4.0.8"
},
"volta": {
"node": "22.17.1"
"node": "22.16.0"
}
}


@@ -70,10 +70,7 @@ const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
console.log(JSON.stringify({ newFiles, duplicates, newAssets }, undefined, 4));
}
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(
newAssets.map(({ filepath }) => filepath),
options,
);
await deleteFiles(newFiles, options);
};
export const startWatch = async (


@@ -16,7 +16,7 @@ name: immich-dev
services:
immich-server:
container_name: immich_server
command: ['immich-dev']
command: [ '/usr/src/app/bin/immich-dev' ]
image: immich-server-dev:latest
# extends:
# file: hwaccel.transcoding.yml
@@ -27,11 +27,11 @@ services:
target: dev
restart: unless-stopped
volumes:
- ../server:/usr/src/app/server
- ../open-api:/usr/src/app/open-api
- ../server:/usr/src/app
- ../open-api:/usr/src/open-api
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
- /usr/src/app/server/node_modules
- /usr/src/app/node_modules
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
@@ -69,20 +69,19 @@ services:
# Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
# user: 0:0
build:
context: ../
dockerfile: web/Dockerfile
command: ['immich-web']
context: ../web
command: [ '/usr/src/app/bin/immich-web' ]
env_file:
- .env
ports:
- 3000:3000
- 24678:24678
volumes:
- ../web:/usr/src/app/web
- ../i18n:/usr/src/app/i18n
- ../open-api/:/usr/src/app/open-api/
- ../web:/usr/src/app
- ../i18n:/usr/src/i18n
- ../open-api/:/usr/src/open-api/
# - ../../ui:/usr/ui
- /usr/src/app/web/node_modules
- /usr/src/app/node_modules
ulimits:
nofile:
soft: 1048576
@@ -117,13 +116,13 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
image: docker.io/valkey/valkey:8-bookworm@sha256:a19bebed6a91bd5e6e2106fef015f9602a3392deeb7c9ed47548378dcee3dfc2
healthcheck:
test: redis-cli ping || exit 1
database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.1-pgvectors0.2.0
env_file:
- .env
environment:
@@ -135,7 +134,6 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
# immich-prometheus:
# container_name: immich_prometheus


@@ -56,14 +56,14 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
image: docker.io/valkey/valkey:8-bookworm@sha256:a19bebed6a91bd5e6e2106fef015f9602a3392deeb7c9ed47548378dcee3dfc2
healthcheck:
test: redis-cli ping || exit 1
restart: always
database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.1-pgvectors0.2.0
env_file:
- .env
environment:
@@ -75,7 +75,6 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
restart: always
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
@@ -83,7 +82,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:63805ebb8d2b3920190daf1cb14a60871b16fd38bed42b857a3182bc621f4996
image: prom/prometheus@sha256:78ed1f9050eb9eaf766af6e580230b1c4965728650e332cd1ee918c0c4699775
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
@@ -92,10 +91,10 @@ services:
# add data source for http://immich-prometheus:9090 to get started
immich-grafana:
container_name: immich_grafana
command: ['./run.sh', '-disable-reporting']
command: [ './run.sh', '-disable-reporting' ]
ports:
- 3000:3000
image: grafana/grafana:12.0.2-ubuntu@sha256:0512d81cdeaaff0e370a9aa66027b465d1f1f04379c3a9c801a905fabbdbc7a5
image: grafana/grafana:11.6.1-ubuntu@sha256:6fc273288470ef499dd3c6b36aeade093170d4f608f864c5dd3a7fabeae77b50
volumes:
- grafana-data:/var/lib/grafana


@@ -49,14 +49,14 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
image: docker.io/valkey/valkey:8-bookworm@sha256:a19bebed6a91bd5e6e2106fef015f9602a3392deeb7c9ed47548378dcee3dfc2
healthcheck:
test: redis-cli ping || exit 1
restart: always
database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.1-pgvectors0.2.0
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
@@ -67,7 +67,6 @@ services:
volumes:
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
shm_size: 128mb
restart: always
volumes:


@@ -1 +1 @@
22.17.1
22.16.0


@@ -490,7 +490,7 @@ You can also scan the Postgres database file structure for errors:
<details>
<summary>Scan for file structure errors</summary>
```bash
docker exec -it immich_postgres pg_amcheck --username=<DB_USERNAME> --heapallindexed --parent-check --rootdescend --progress --all --install-missing
docker exec -it immich_postgres pg_amcheck --username=postgres --heapallindexed --parent-check --rootdescend --progress --all --install-missing
```
A normal result will end something like this and return with an exit code of `0`:


@@ -57,7 +57,7 @@ Then please follow the steps in the following section for restoring the database
<TabItem value="Linux system" label="Linux system" default>
```bash title='Backup'
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | gzip > "/path/to/backup/dump.sql.gz"
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > "/path/to/backup/dump.sql.gz"
```
```bash title='Restore'
@@ -79,7 +79,7 @@ docker compose up -d # Start remainder of Immich apps
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">
```powershell title='Backup'
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME>))
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres))
```
```powershell title='Restore'
@@ -150,10 +150,12 @@ for more info read the [release notes](https://github.com/immich-app/immich/rele
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
- **Encoded Assets:**
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
- **Postgres**
- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.
@@ -199,6 +201,7 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
- **Postgres**
- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.

Binary file not shown (image; previous version 33 KiB).


@@ -46,12 +46,6 @@ services:
When a new asset is uploaded it kicks off a series of jobs, which include metadata extraction, thumbnail generation, machine learning tasks, and storage template migration, if enabled. To view the status of a job navigate to the Administration -> Jobs page.
Additionally, some jobs run on a schedule, which is every night at midnight. This schedule, with the exception of [External Libraries](/docs/features/libraries) scanning, cannot be changed.
<img src={require('./img/admin-jobs.webp').default} width="60%" title="Admin jobs" />
Additionally, some jobs (such as memories generation) run on a schedule, which is every night at midnight by default. To change when they run or enable/disable a job navigate to System Settings -> [Nightly Tasks Settings](https://my.immich.app/admin/system-settings?isOpen=nightly-tasks).
<img src={require('./img/admin-nightly-tasks.webp').default} width="60%" title="Admin nightly tasks" />
:::note
Some jobs ([External Libraries](/docs/features/libraries) scanning, Database Dump) are configured in their own sections in System Settings.
:::


@@ -20,6 +20,7 @@ Immich supports 3rd party authentication via [OpenID Connect][oidc] (OIDC), an i
Before enabling OAuth in Immich, a new client application needs to be configured in the 3rd-party authentication server. While the specifics of this setup vary from provider to provider, the general approach should be the same.
1. Create a new (Client) Application
1. The **Provider** type should be `OpenID Connect` or `OAuth2`
2. The **Client type** should be `Confidential`
3. The **Application** type should be `Web`
@@ -28,6 +29,7 @@ Before enabling OAuth in Immich, a new client application needs to be configured
2. Configure Redirect URIs/Origins
The **Sign-in redirect URIs** should include:
- `app.immich:///oauth-callback` - for logging in with OAuth from the [Mobile App](/docs/features/mobile-app.mdx)
- `http://DOMAIN:PORT/auth/login` - for logging in with OAuth from the Web Client
- `http://DOMAIN:PORT/user-settings` - for manually linking OAuth in the Web Client
@@ -35,17 +37,21 @@ Before enabling OAuth in Immich, a new client application needs to be configured
Redirect URIs should contain all the domains you will be using to access Immich. Some examples include:
Mobile
- `app.immich:///oauth-callback` (You **MUST** include this for iOS and Android mobile apps to work properly)
Localhost
- `http://localhost:2283/auth/login`
- `http://localhost:2283/user-settings`
Local IP
- `http://192.168.0.200:2283/auth/login`
- `http://192.168.0.200:2283/user-settings`
Hostname
- `https://immich.example.com/auth/login`
- `https://immich.example.com/user-settings`
@@ -62,7 +68,6 @@ Once you have a new OAuth client application configured, Immich can be configure
| Scope | string | openid email profile | Full list of scopes to send with the request (space delimited) |
| Signing Algorithm | string | RS256 | The algorithm used to sign the id token (examples: RS256, HS256) |
| Storage Label Claim | string | preferred_username | Claim mapping for the user's storage label**¹** |
| Role Claim | string | immich_role | Claim mapping for the user's role. (should return "user" or "admin")**¹** |
| Storage Quota Claim | string | immich_quota | Claim mapping for the user's storage**¹** |
| Default Storage Quota (GiB) | number | 0 | Default quota for user without storage quota claim (Enter 0 for unlimited quota) |
| Button Text | string | Login with OAuth | Text for the OAuth button on the web |


@@ -64,13 +64,7 @@ COMMIT;
### Updating VectorChord
When installing a new version of VectorChord, you will need to manually update the extension and reindex by connecting to the Immich database and running:
```
ALTER EXTENSION vchord UPDATE;
REINDEX INDEX face_index;
REINDEX INDEX clip_index;
```
When installing a new version of VectorChord, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vchord UPDATE;`.
## Migrating to VectorChord
@@ -82,8 +76,6 @@ Support for pgvecto.rs will be dropped in a later release, hence we recommend al
The easiest option is to have both extensions installed during the migration:
<details>
<summary>Migration steps (automatic)</summary>
1. Ensure you still have pgvecto.rs installed
2. Install `pgvector` (`>= 0.7.0, < 1.0.0`). The easiest way to do this is on Debian/Ubuntu by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`)
3. [Install VectorChord][vchord-install]
@@ -97,12 +89,8 @@ The easiest option is to have both extensions installed during the migration:
11. Restart the Postgres database
12. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate). `pgvector` must remain installed as it provides the data types used by `vchord`
</details>
If it is not possible to have both VectorChord and pgvecto.rs installed at the same time, you can perform the migration with more manual steps:
<details>
<summary>Migration steps (manual)</summary>
1. While pgvecto.rs is still installed, run the following SQL command using psql or your choice of database client. Take note of the number outputted by this command as you will need it later
```sql
@@ -135,20 +123,14 @@ ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512);
5. Start Immich and let it create new indices using VectorChord
</details>
### Migrating from pgvector
<details>
<summary>Migration steps</summary>
1. Ensure you have at least 0.7.0 of pgvector installed. If it is below that, please upgrade it and run the SQL command `ALTER EXTENSION vector UPDATE;` using psql or your choice of database client
2. Follow the Prerequisites to install VectorChord
3. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;`
4. Remove the `DB_VECTOR_EXTENSION=pgvector` environmental variable as it will make Immich still use pgvector if set
5. Start Immich and let it create new indices using VectorChord
</details>
Note that VectorChord itself uses pgvector types, so you should not uninstall pgvector after following these steps.
[vchord-install]: https://docs.vectorchord.ai/vectorchord/getting-started/installation.html


@@ -2,17 +2,16 @@
The `immich-server` docker image comes preinstalled with an administrative CLI (`immich-admin`) that supports the following commands:
| Command | Description |
| ------------------------ | ------------------------------------------------------------- |
| `help` | Display help |
| `reset-admin-password` | Reset the password for the admin user |
| `disable-password-login` | Disable password login |
| `enable-password-login` | Enable password login |
| `enable-oauth-login` | Enable OAuth login |
| `disable-oauth-login` | Disable OAuth login |
| `list-users` | List Immich users |
| `version` | Print Immich version |
| `change-media-location` | Change database file paths to align with a new media location |
| Command | Description |
| ------------------------ | ------------------------------------- |
| `help` | Display help |
| `reset-admin-password` | Reset the password for the admin user |
| `disable-password-login` | Disable password login |
| `enable-password-login` | Enable password login |
| `enable-oauth-login` | Enable OAuth login |
| `disable-oauth-login` | Disable OAuth login |
| `list-users` | List Immich users |
| `version` | Print Immich version |
## How to run a command
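As a sketch of the typical invocation (assuming the default `immich_server` container name), a command is run by executing `immich-admin` inside the running server container:
```bash
# Run an immich-admin command inside the running server container
# (assumes the container is named immich_server; adjust to your setup)
docker exec -it immich_server immich-admin list-users
```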
@@ -89,24 +88,3 @@ Print Immich Version
immich-admin version
v1.129.0
```
Change media location
```
immich-admin change-media-location
? Enter the previous value of IMMICH_MEDIA_LOCATION: /usr/src/app/upload
? Enter the new value of IMMICH_MEDIA_LOCATION: /data
Previous value: /usr/src/app/upload
Current value: /data
Changing database paths from "/usr/src/app/upload/*" to "/data/*"
? Do you want to proceed? [Y/n] y
Database file paths updated successfully! 🎉
You may now set IMMICH_MEDIA_LOCATION=/data and restart!
(please remember to update applicable volume mounts e.g. ${UPLOAD_LOCATION}:/data)
```


@@ -1,480 +0,0 @@
---
title: Devcontainers
sidebar_position: 3
---
# Development with Dev Containers
Dev Containers provide a consistent, reproducible development environment using Docker containers. With a single click, you can get started with an Immich development environment on Mac, Linux, Windows, or in the cloud using GitHub Codespaces.
Get started fast!
[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/immich-app/immich/)
[Learn more about Dev Containers](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/adding-a-dev-container-configuration/introduction-to-dev-containers)
## Prerequisites
Before getting started, ensure you have:
- **Docker Desktop** (latest version)
- [Mac](https://docs.docker.com/desktop/install/mac-install/)
- [Windows](https://docs.docker.com/desktop/install/windows-install/) (with WSL2 backend recommended)
- [Linux](https://docs.docker.com/desktop/install/linux-install/)
- **Visual Studio Code** with the [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)
- **Git** for cloning the repository
- At least **8GB of RAM** (16GB recommended)
- **20GB of free disk space**
:::tip Alternative Development Environments
While this guide focuses on VS Code, you have many options for Dev Container development:
**Local Editors:**
- [IntelliJ IDEA](https://www.jetbrains.com/help/idea/connect-to-devcontainer.html) - Full JetBrains IDE support
- [neovim](https://github.com/jamestthompson3/nvim-remote-containers) - Lightweight terminal-based editor
- [Emacs](https://github.com/emacs-lsp/lsp-docker) - Extensible text editor
- [DevContainer CLI](https://github.com/devcontainers/cli) - Command-line interface
**Cloud-Based Solutions:**
- [GitHub Codespaces](https://github.com/features/codespaces) - Fully integrated with GitHub, excellent devcontainer.json support
- [GitPod](https://www.gitpod.io) - SaaS platform with recent Dev Container support (historically used gitpod.yml)
**Self-Hostable Options:**
- [Coder](https://coder.com) - Enterprise-focused, requires Terraform knowledge, self-managed
- [DevPod](https://devpod.sh) - Client-only tool with excellent devcontainer.json support, works with any provider (local, cloud, or on-premise)
:::
## Dev Container Services
The Dev Container environment consists of the following services:
| Service | Container Name | Description | Ports |
| ---------------- | ------------------------- | --------------------------------------------------------- | ----------------------------------------------------------------------- |
| Server & Web | `immich-server` | Runs both API server and web frontend in development mode | 2283 (API)<br/>3000 (Web)<br/>9230 (Workers Debug)<br/>9231 (API Debug) |
| Database | `database` | PostgreSQL database | 5432 |
| Cache | `redis` | Valkey cache server | 6379 |
| Machine Learning | `immich-machine-learning` | Immich ML model inference server | 3003 |
## Getting Started
### Step 1: Clone the Repository
```bash
git clone https://github.com/immich-app/immich.git
cd immich
```
### Step 2: Configure Environment Variables
The Immich Dev Containers read environment variables from your shell environment, not from `.env` files. This allows them to work in cloud environments without pre-configuration.
:::important Configuration
When running locally, if you want to create (or use an existing) database and/or photo storage folder, you must set the `UPLOAD_LOCATION` variable in your shell environment before launching the Dev Container. This determines where uploaded files are stored and also where the database stores its data.
```bash
# Set temporarily for current session
export UPLOAD_LOCATION=/opt/dev_upload_folder
# Or add to your shell profile for persistence
# (~/.bashrc, ~/.zshrc, ~/.bash_profile, etc.)
echo 'export UPLOAD_LOCATION=/opt/dev_upload_folder' >> ~/.bashrc
source ~/.bashrc
```
:::
### Step 3: Launch the Dev Container
:::tip
Immich development makes extensive use of specialized [base images](https://github.com/immich-app/base-images) for its Docker Compose-based development. For this reason, you won't be able to use VS Code's **_Clone Repository in a Container Volume_** command.
:::
#### Using VS Code UI:
1. Open the cloned repository in VS Code
2. Press `F1` or `Ctrl/Cmd+Shift+P` to open the command palette
3. Type and select "Dev Containers: Rebuild and Reopen in Container"
4. Select "Immich - Backend, Frontend and ML" from the list
5. Wait for the container to build and start (this may take several minutes on first run)
#### Using VS Code Quick Actions:
1. Open the repository in VS Code
2. You should see a popup asking if you want to reopen in a container
3. Click "Reopen in Container"
#### Using Command Line:
```bash
# Using the DevContainer CLI
devcontainer up --workspace-folder .
```
## Environment Variable Details
### How Dev Containers Handle Environment Variables
Unlike the Docker Compose-based Immich developer setup, which uses `.env` files, the Immich Dev Containers read environment variables from your shell environment. This is configured in `.devcontainer/devcontainer.json`:
```json
"remoteEnv": {
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./Library}",
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
}
```
The `${localEnv:VARIABLE:default}` syntax reads from your shell environment with optional defaults.
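As a quick illustration of that fallback behavior, using the DevContainer CLI shown earlier (the override value is just an example):

```bash
# UPLOAD_LOCATION is not exported: the container falls back to ./Library
devcontainer up --workspace-folder .

# An exported value wins for this invocation
UPLOAD_LOCATION=/opt/dev_upload_folder devcontainer up --workspace-folder .
```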
### Upload Location Path Resolution
The `UPLOAD_LOCATION` environment variable controls where files are stored:
**Default:** `./Library` (relative to the `docker` directory)
**Resolved to:** `<immich-root>/docker/Library`
**Bind Mounts Created:**
```yaml
# From .devcontainer/server/container-compose-overrides.yml
- ${UPLOAD_LOCATION-./Library}/photos:/workspaces/immich/server/upload
- ${UPLOAD_LOCATION-./Library}/postgres:/var/lib/postgresql/data
```
### Database Configuration
These variables have sensible defaults (for development) but can be customized:
| Variable | Default | Description |
| ------------------ | ---------- | ------------------- |
| `DB_PASSWORD` | `postgres` | PostgreSQL password |
| `DB_USERNAME` | `postgres` | PostgreSQL username |
| `DB_DATABASE_NAME` | `immich` | Database name |
### Setting Environment Variables
Add these to your shell profile (`~/.bashrc`, `~/.zshrc`, `~/.bash_profile`, etc.):
```bash
# Required
export UPLOAD_LOCATION=./Library # or absolute path
# Optional (only if using non-default values)
export DB_PASSWORD=your_password
export DB_USERNAME=your_username
export DB_DATABASE_NAME=your_database
```
Remember to reload your shell configuration:
```bash
source ~/.bashrc # or ~/.zshrc, etc.
```
## Git Configuration
### SSH Keys and Authentication
To use your SSH keys for GitHub access inside the Dev Container:
1. **Start SSH Agent** on your host machine:
```bash
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/id_rsa # or your key path
```
2. **VS Code automatically forwards your SSH agent** to the container
For detailed instructions, see the [VS Code guide on sharing Git credentials](https://code.visualstudio.com/remote/advancedcontainers/sharing-git-credentials).
### Commit Signing
To use your SSH key for commit signing, see the [GitHub guide on SSH commit signing](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key#telling-git-about-your-ssh-key).
## Development Workflow
### Automatic Setup
When the Dev Container starts, it automatically:
1. **Runs post-create script** (`container-server-post-create.sh`):
- Adjusts file permissions for the `node` user
- Installs dependencies: `npm install` in all packages
- Builds TypeScript SDK: `npm run build` in `open-api/typescript-sdk`
2. **Starts development servers** via VS Code tasks:
- `Immich API Server (Nest)` - API server with hot-reloading on port 2283
- `Immich Web Server (Vite)` - Web frontend with hot-reloading on port 3000
- Both servers watch for file changes and recompile automatically
3. **Configures port forwarding**:
- Web UI: http://localhost:3000 (opens automatically)
- API: http://localhost:2283
- Debug ports: 9230 (workers), 9231 (API)
:::info
The Dev Container setup replaces the `make dev` command from the traditional setup. All services start automatically when you open the container.
:::
### Accessing Services
Once running, you can access:
| Service | URL | Description |
| -------- | --------------------- | ---------------------------------------------------------------------------------------------- |
| Web UI | http://localhost:3000 | Main web interface |
| API | http://localhost:2283 | REST API endpoints (Not used directly, web UI will expose this over http://localhost:3000/api) |
| Database | localhost:5432 | PostgreSQL (username: `postgres`) (Not used directly) |
### Connecting Mobile Apps
To connect the mobile app to your Dev Container:
1. Find your machine's IP address
2. In the mobile app, use: `http://YOUR_IP:2283/api`
3. Ensure your firewall allows connections on port 2283
### Making Code Changes
- **Server code** (`/server`): Changes trigger automatic restart
- **Web code** (`/web`): Changes trigger hot module replacement
- **Database migrations**: Run `npm run sync:sql` in the server directory
- **API changes**: Regenerate TypeScript SDK with `make open-api`
## Testing
### Running Tests
The Dev Container supports multiple ways to run tests:
#### Using Make Commands (Recommended)
```bash
# Run tests for specific components
make test-server # Server unit tests
make test-web # Web unit tests
make test-e2e # End-to-end tests
make test-cli # CLI tests
# Run all tests
make test-all # Runs tests for all components
# Medium tests (integration tests)
make test-medium-dev   # Medium (integration) tests
```
#### Using NPM Directly
```bash
# Server tests
cd /workspaces/immich/server
npm test # Run all tests
npm run test:watch # Watch mode
npm run test:cov # Coverage report
# Web tests
cd /workspaces/immich/web
npm test # Run all tests
npm run test:watch # Watch mode
# E2E tests
cd /workspaces/immich/e2e
npm run test # Run API tests
npm run test:web # Run web UI tests
```
### Code Quality Commands
```bash
# Linting
make lint-server # Lint server code
make lint-web # Lint web code
make lint-all # Lint all components
# Formatting
make format-server # Format server code
make format-web # Format web code
make format-all # Format all code
# Type checking
make check-server # Type check server
make check-web # Type check web
make check-all # Check all components
# Complete hygiene check
make hygiene-all # Runs lint, format, check, SQL sync, and audit
```
### Additional Make Commands
```bash
# Build commands
make build-server # Build server
make build-web # Build web app
make build-all # Build everything
# API generation
make open-api # Generate OpenAPI specs
make open-api-typescript # Generate TypeScript SDK
make open-api-dart # Generate Dart SDK
# Database
make sql # Sync database schema
# Dependencies
make install-server # Install server dependencies
make install-web # Install web dependencies
make install-all # Install all dependencies
```
### Debugging
The Dev Container is pre-configured for debugging:
1. **API Server Debugging**:
- Set breakpoints in VS Code
- Press `F5` or use "Run and Debug" panel
- Select "Attach to Server" configuration
- Debug port: 9231
2. **Worker Debugging**:
- Use "Attach to Workers" configuration
- Debug port: 9230
3. **Web Debugging**:
- Use browser DevTools
- VS Code debugger for Chrome/Edge extensions supported
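For reference, a Node.js attach entry of this kind in `.vscode/launch.json` looks roughly like the sketch below. The repository ships its own launch configurations, so treat this as an illustration of the shape rather than the exact definition:

```json
{
  "type": "node",
  "request": "attach",
  "name": "Attach to Server",
  "port": 9231,
  "restart": true
}
```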
## Troubleshooting
### Common Issues
#### Permission Errors
**Problem**: `EACCES` or permission denied errors
**Solution**:
- The Dev Container runs as the `node` user (UID 1000)
- If your host UID differs, you may see permission issues
- Try rebuilding the container: "Dev Containers: Rebuild Container"
#### Container Won't Start
**Problem**: Dev Container fails to start or build
**Solution**:
1. Check Docker is running: `docker ps`
2. Clean Docker resources: `docker system prune -a`
3. Check available disk space
4. Review Docker Desktop resource limits
#### Port Already in Use
**Problem**: "Port 3000/2283 is already in use"
**Solution**:
1. Check for conflicting services: `lsof -i :3000` (macOS/Linux)
2. Stop conflicting services or change port mappings
3. Restart Docker Desktop
#### Upload Location Not Set
**Problem**: Errors about missing UPLOAD_LOCATION
**Solution**:
1. Set the environment variable: `export UPLOAD_LOCATION=./Library`
2. Add to your shell profile for persistence
3. Restart your terminal and VS Code
#### Database Connection Failed
**Problem**: Cannot connect to PostgreSQL
**Solution**:
1. Ensure all containers are running: `docker ps`
2. Check logs: "Dev Containers: Show Container Log"
3. Verify database credentials match environment variables
### Getting Help
If you encounter issues:
1. Check container logs: View → Output → Select "Dev Containers"
2. Rebuild without cache: "Dev Containers: Rebuild Container Without Cache"
3. Review [common Docker issues](https://docs.docker.com/desktop/troubleshoot/)
4. Ask in [Discord](https://discord.immich.app) `#help-desk-support` channel
## Mobile Development
While the Dev Container focuses on server and web development, you can connect mobile apps for testing:
### Connecting iOS/Android Apps
1. **Ensure API is accessible**:
```bash
# Find your machine's IP
# macOS
ipconfig getifaddr en0
# Linux
hostname -I
# Windows (in WSL2)
ip addr show eth0
```
2. **Configure mobile app**:
- Server URL: `http://YOUR_IP:2283/api`
- Ensure firewall allows port 2283
3. **For full mobile development**, see the [mobile development guide](/docs/developer/setup) which covers:
- Flutter setup
- Running on simulators/devices
- Mobile-specific debugging
## Advanced Configuration
### Custom VS Code Extensions
Add extensions to `.devcontainer/devcontainer.json`:
```json
"customizations": {
"vscode": {
"extensions": [
"your.extension-id"
]
}
}
```
### Additional Services
To add services (e.g., Redis Commander), modify:
1. `/docker/docker-compose.dev.yml` - Add service definition
2. `/.devcontainer/server/container-compose-overrides.yml` - Add overrides if needed
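For example, a Redis Commander sketch added to `docker-compose.dev.yml` could look like this (the image, port, and variables are illustrative and not part of the Immich compose files):

```yaml
services:
  redis-commander:
    image: rediscommander/redis-commander:latest
    environment:
      # point the UI at the existing Valkey/Redis service
      - REDIS_HOSTS=local:redis:6379
    ports:
      - 8081:8081
    depends_on:
      - redis
```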
### Resource Limits
Adjust Docker Desktop resources:
- **macOS/Windows**: Docker Desktop → Settings → Resources
- **Linux**: Modify Docker daemon configuration
Recommended minimums:
- CPU: 4 cores
- Memory: 8GB
- Disk: 20GB
## Next Steps
- Read the [architecture overview](/docs/developer/architecture)
- Learn about [database migrations](/docs/developer/database-migrations)
- Explore [API documentation](/docs/api)
- Join `#immich` on [Discord](https://discord.immich.app)

View File

@@ -2,7 +2,7 @@
Folder view provides an additional view besides the timeline that is similar to a file explorer. It allows you to navigate through the folders and files in the library. This feature is handy for a highly curated and customized external library or a nicely configured storage template.
You can enable this feature under [`Account Settings > Features > Folders`](https://my.immich.app/user-settings?isOpen=feature+folders)
You can enable this feature under [`Account Settings > Features > Folder View`](https://my.immich.app/user-settings?isOpen=feature+folders)
## Enable folder view

View File

@@ -56,7 +56,7 @@ Internally, Immich uses the [glob](https://www.npmjs.com/package/glob) package t
### Automatic watching (EXPERIMENTAL)
This feature is considered experimental and for advanced users only. If enabled, it will allow automatic watching of the filesystem which means new assets are automatically imported to Immich without needing to rescan.
This feature - currently hidden in the config file - is considered experimental and for advanced users only. If enabled, it will allow automatic watching of the filesystem which means new assets are automatically imported to Immich without needing to rescan.
If your photos are on a network drive, automatic file watching likely won't work. In that case, you will have to rely on a periodic library refresh to pull in your changes.
@@ -112,15 +112,12 @@ _Remember to run `docker compose up -d` to register the changes. Make sure you c
These actions must be performed by the Immich administrator.
- Click on your avatar in the upper right corner
- Click on Administration -> External Libraries
- Click on Create an external library…
- Click on Administration -> Libraries
- Click on Create External Library
- Select which user owns the library, this can not be changed later
- Enter `/mnt/media/christmas-trip` then click Add
- Click on Save
- Click the drop-down menu on the newly created library
- Click on Scan
- Click the drop-down menu on the newly created library
- Click on Rename Library and rename it to "Christmas Trip"
NOTE: We have to use the `/mnt/media/christmas-trip` path and not the `/mnt/nas/christmas-trip` path since all paths have to be what the Docker containers see.
@@ -159,7 +156,9 @@ Within seconds, the assets from the old-pics and videos folders should show up i
Folder view provides an additional view besides the timeline that is similar to a file explorer. It allows you to navigate through the folders and files in the library. This feature is handy for a highly curated and customized external library or a nicely configured storage template.
You can enable this feature under [`Account Settings > Features > Folders`](https://my.immich.app/user-settings?isOpen=feature+folders)
You can enable this feature under [`Account Settings > Features > Folder View`](https://my.immich.app/user-settings?isOpen=feature+folders)
The UI is currently only available for the web; mobile will come in a subsequent release.
<img src={require('./img/folder-view-1.webp').default} width="100%" title='Folder-view' />
@@ -169,7 +168,7 @@ You can enable this feature under [`Account Settings > Features > Folders`](http
Only an admin can do this.
:::
You can define a custom interval for the trigger external library rescan under Administration -> Settings -> External Library.
You can define a custom interval for the trigger external library rescan under Administration -> Settings -> Library.
You can set the scanning interval using the preset or cron format. For more information you can refer to [Crontab Guru](https://crontab.guru/).
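For example, a cron value of `0 2 * * *` triggers a rescan every night at 02:00.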
<img src={require('./img/library-custom-scan-interval.webp').default} width="75%" title='Set custom scan interval for external library' />

View File

@@ -88,9 +88,9 @@ It will only reflect files you add.
:::
If the same asset is in more than one album, it will only sync to the first album it's in; after that, it won't sync again even if the user clicks sync albums manually.
To overcome this limitation, the files must be removed from the ignore list by
To overcome this limitation, the files must be removed from the blacklist by
App settings -> Advanced -> Duplicate Assets -> Clear
:::info
Cleaning duplicate assets from the list will cause all previously uploaded duplicate files to be re-uploaded. The files will not actually be uploaded; they will be rejected on the server side (due to duplication), but they will be synchronized to the album and added back to the ignore list at the end of the synchronization.
Cleaning duplicate assets from the list will cause all previously uploaded duplicate files to be re-uploaded. The files will not actually be uploaded; they will be rejected on the server side (due to duplication), but they will be synchronized to the album and added back to the black list at the end of the synchronization.
:::

View File

@@ -16,7 +16,7 @@ For the full list, refer to the [Immich source code](https://github.com/immich-a
| `HEIC` | `.heic` | :white_check_mark: | |
| `HEIF` | `.heif` | :white_check_mark: | |
| `JPEG 2000` | `.jp2` | :white_check_mark: | |
| `JPEG` | `.jpeg` `.jpg` `.jpe` `.insp` | :white_check_mark: | |
| `JPEG` | `.webp` `.jpg` `.jpe` `.insp` | :white_check_mark: | |
| `JPEG XL` | `.jxl` | :white_check_mark: | |
| `PNG` | `.png` | :white_check_mark: | |
| `PSD` | `.psd` | :white_check_mark: | Adobe Photoshop |

View File

@@ -41,7 +41,7 @@ In the Immich web UI:
- Click Add path
<img src={require('./img/add-path-button.webp').default} width="50%" title="Add Path button" />
- Enter **/home/user/photos1** as the path and click Add
- Enter **/usr/src/app/external** as the path and click Add
<img src={require('./img/add-path-field.webp').default} width="50%" title="Add Path field" />
- Save the new path

Binary file not shown (image: 10 KiB before, 2.3 KiB after).

View File

@@ -52,9 +52,9 @@ REMOTE_BACKUP_PATH="/path/to/remote/backup/directory"
### Local
# Backup Immich database
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
# For deduplicating backup programs such as Borg or Restic, compressing the content can increase backup size by making it harder to deduplicate. If you are using a different program or still prefer to compress, you can use the following command instead:
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
### Append to local Borg repository
borg create "$BACKUP_PATH/immich-borg::{now}" "$UPLOAD_LOCATION" --exclude "$UPLOAD_LOCATION"/thumbs/ --exclude "$UPLOAD_LOCATION"/encoded-video/

View File

@@ -123,7 +123,7 @@ The default configuration looks like this:
"buttonText": "Login with OAuth",
"clientId": "",
"clientSecret": "",
"defaultStorageQuota": null,
"defaultStorageQuota": 0,
"enabled": false,
"issuerUrl": "",
"mobileOverrideEnabled": false,

View File

@@ -34,7 +34,7 @@ These environment variables are used by the `docker-compose.yml` file and do **N
| `TZ` | Timezone | <sup>\*1</sup> | server | microservices |
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL` | Log level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `/usr/src/app/upload` | server | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `./upload`<sup>\*3</sup> | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
| `CPU_CORES` | Number of cores available to the Immich server | auto-detected CPU core count | server | |
@@ -49,6 +49,9 @@ These environment variables are used by the `docker-compose.yml` file and do **N
\*2: This path is where the Immich code looks for the files, which is internal to the Docker container. Setting it to a path on your host will certainly break things; you should use the `UPLOAD_LOCATION` variable instead.
\*3: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
It only needs to be set if the Immich deployment method is changing.
## Workers
| Variable | Description | Default | Containers |
@@ -69,25 +72,22 @@ Information on the current workers can be found [here](/docs/administration/jobs
## Database
| Variable | Description | Default | Containers |
| :---------------------------------- | :------------------------------------------------------------------------------------- | :--------: | :----------------------------- |
| `DB_URL` | Database URL | | server |
| `DB_HOSTNAME` | Database host | `database` | server |
| `DB_PORT` | Database port | `5432` | server |
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
| `DB_SSL_MODE` | Database SSL mode | | server |
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
| `DB_STORAGE_TYPE` | Optimize concurrent IO on SSDs or sequential IO on HDDs ([`SSD`, `HDD`])<sup>\*3</sup> | `SSD` | server |
| Variable | Description | Default | Containers |
| :---------------------------------- | :--------------------------------------------------------------------------- | :--------: | :----------------------------- |
| `DB_URL` | Database URL | | server |
| `DB_HOSTNAME` | Database host | `database` | server |
| `DB_PORT` | Database port | `5432` | server |
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
| `DB_SSL_MODE` | Database SSL mode | | server |
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
\*2: If not provided, the appropriate extension to use is auto-detected at startup by introspecting the database. When multiple extensions are installed, the order of preference is VectorChord, pgvecto.rs, pgvector.
\*3: Uses either [`postgresql.ssd.conf`](https://github.com/immich-app/base-images/blob/main/postgres/postgresql.ssd.conf) or [`postgresql.hdd.conf`](https://github.com/immich-app/base-images/blob/main/postgres/postgresql.hdd.conf), which mainly controls the Postgres `effective_io_concurrency` setting to allow for concurrent IO on SSDs and sequential IO on HDDs.
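For example, a deployment with an HDD-backed database could set this in the environment it passes to the containers (a sketch; where exactly you set it depends on your deployment method):

```bash
# .env (or equivalent) for a spinning-disk setup
DB_STORAGE_TYPE=HDD
```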
:::info
All `DB_` variables must be provided to all Immich workers, including `api` and `microservices`.

View File

@@ -39,8 +39,8 @@ alt="Dot Env Example"
/>
- Change the default `DB_PASSWORD`, and add custom database connection information if necessary.
- Change `DB_DATA_LOCATION` to a folder (absolute path) where the database will be saved to disk.
- Change `UPLOAD_LOCATION` to a folder (absolute path) where media (uploaded and generated) will be stored.
- Change `DB_DATA_LOCATION` to a folder where the database will be saved to disk.
- Change `UPLOAD_LOCATION` to a folder where media (uploaded and generated) will be stored.
11. Click on "**Deploy the stack**".

View File

@@ -25,7 +25,7 @@ When you're all done, you should have the following:
- `./docker/immich-app/postgres`
- `./docker/immich-app/library`
Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) and [`example.env`](https://github.com/immich-app/immich/releases/latest/download/example.env) to your computer. Upload the files to the `./docker/immich-app` directory, and rename `example.env` to `.env`.
Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) and [`example.env`](https://github.com/immich-app/immich/releases/latest/download/example.env) to your computer. Upload the files to the `./docker/immich-app` directory.
## Step 2 - Populate the .env file with custom values

View File

@@ -9,7 +9,7 @@ This is a community contribution and not officially supported by the Immich team
Community support can be found in the dedicated channel on the [Discord Server](https://discord.immich.app/).
**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/apps/tree/master/trains/community/immich).**
**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/charts/tree/master/community/immich).**
:::
Immich can easily be installed on TrueNAS Community Edition via the **Community** train application.

View File

@@ -75,6 +75,7 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
5. Click "**Save Changes**", you will be prompted to edit stack UI labels, just leave this blank and click "**Ok**"
6. Select the cog ⚙️ next to Immich, click "**Edit Stack**", then click "**Env File**"
7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following:
- `UPLOAD_LOCATION`: Create a folder in your Images Unraid share and place its **absolute** path here. For example, my _"images"_ share has a folder within it called _"immich"_; if I browse to this directory in the terminal and type `pwd`, the output is `/mnt/user/images/immich`, which is the exact value I need to enter as my `UPLOAD_LOCATION`.
- `DB_DATA_LOCATION`: Change this to use an Unraid share (preferably a cache pool, e.g. `/mnt/user/appdata/postgresql/data`, which uses the `appdata` share). Also create the `postgresql/data` folders, e.g. by running `mkdir -p /mnt/user/{share_location}/postgresql/data`. If left at the default, it will try to use Unraid's `/boot/config/plugins/compose.manager/projects/[stack_name]/postgres` folder, which it doesn't have permission to access, resulting in this container continuously restarting. See the example below.
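Putting the two example paths above together, the relevant lines of the pasted `.env` would then read (substitute your own shares):

```bash
UPLOAD_LOCATION=/mnt/user/images/immich
DB_DATA_LOCATION=/mnt/user/appdata/postgresql/data
```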

1531  docs/package-lock.json generated: file diff suppressed because it is too large

View File

@@ -16,9 +16,8 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "~3.8.0",
"@docusaurus/preset-classic": "~3.8.0",
"@docusaurus/theme-common": "~3.8.0",
"@docusaurus/core": "~3.7.0",
"@docusaurus/preset-classic": "~3.7.0",
"@mdi/js": "^7.3.67",
"@mdi/react": "^1.6.1",
"@mdx-js/react": "^3.0.0",
@@ -27,7 +26,6 @@
"clsx": "^2.0.0",
"docusaurus-lunr-search": "^3.3.2",
"docusaurus-preset-openapi": "^0.7.5",
"lunr": "^2.3.9",
"postcss": "^8.4.25",
"prism-react-renderer": "^2.3.1",
"raw-loader": "^4.0.2",
@@ -37,7 +35,7 @@
"url": "^0.11.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "~3.8.0",
"@docusaurus/module-type-aliases": "~3.7.0",
"@docusaurus/tsconfig": "^3.7.0",
"@docusaurus/types": "^3.7.0",
"prettier": "^3.2.4",
@@ -59,6 +57,6 @@
"node": ">=20"
},
"volta": {
"node": "22.17.1"
"node": "22.16.0"
}
}

View File

@@ -58,12 +58,6 @@ const guides: CommunityGuidesProps[] = [
description: 'Access Immich with an end-to-end encrypted connection.',
url: 'https://meshnet.nordvpn.com/how-to/remote-files-media-access/immich-remote-access',
},
{
title: 'Trust Self Signed Certificates with Immich - OAuth Setup',
description:
'Set up Certificate Authority trust with Immich, and your private OAuth2/OpenID service, while using a private CA for HTTPS communication.',
url: 'https://github.com/immich-app/immich/discussions/18614',
},
];
function CommunityGuide({ title, description, url }: CommunityGuidesProps): JSX.Element {

View File

@@ -85,7 +85,6 @@ import React from 'react';
import { Item, Timeline } from '../components/timeline';
const releases = {
'v1.135.0': new Date(2025, 5, 18),
'v1.133.0': new Date(2025, 4, 21),
'v1.130.0': new Date(2025, 2, 25),
'v1.127.0': new Date(2025, 1, 26),
@@ -197,6 +196,14 @@ const roadmap: Item[] = [
description: 'Automate tasks with workflows',
getDateLabel: () => 'Planned for 2025',
},
{
done: false,
icon: mdiTableKey,
iconColor: 'gray',
title: 'Fine grained access controls',
description: 'Granular access controls for users and api keys',
getDateLabel: () => 'Planned for 2025',
},
{
done: false,
icon: mdiImageEdit,
@@ -232,26 +239,12 @@ const roadmap: Item[] = [
];
const milestones: Item[] = [
{
icon: mdiStar,
iconColor: 'gold',
title: '70,000 Stars',
description: 'Reached 70K Stars on GitHub!',
getDateLabel: withLanguage(new Date(2025, 6, 9)),
},
withRelease({
icon: mdiTableKey,
iconColor: 'gray',
title: 'Fine grained access controls',
description: 'Granular access controls for api keys',
release: 'v1.135.0',
}),
withRelease({
icon: mdiCast,
iconColor: 'aqua',
title: 'Google Cast (web and mobile)',
title: 'Google Cast (web)',
description: 'Cast assets to Google Cast/Chromecast compatible devices',
release: 'v1.135.0',
release: 'v1.133.0',
}),
withRelease({
icon: mdiLockOutline,

View File

@@ -1,5 +1,4 @@
/docs /docs/overview/welcome 307
/docs/ /docs/overview/welcome 307
/docs /docs/overview/introduction 307
/docs/mobile-app-beta-program /docs/features/mobile-app 307
/docs/contribution-guidelines /docs/overview/support-the-project#contributing 307
/docs/install /docs/install/docker-compose 307
@@ -31,4 +30,4 @@
/docs/guides/api-album-sync /docs/community-projects 307
/docs/guides/remove-offline-files /docs/community-projects 307
/milestones /roadmap 307
/docs/overview/introduction /docs/overview/welcome 307
/docs/overview/introduction /docs/overview/welcome 307

View File

@@ -1,24 +1,4 @@
[
{
"label": "v1.136.0",
"url": "https://v1.136.0.archive.immich.app"
},
{
"label": "v1.135.3",
"url": "https://v1.135.3.archive.immich.app"
},
{
"label": "v1.135.2",
"url": "https://v1.135.2.archive.immich.app"
},
{
"label": "v1.135.1",
"url": "https://v1.135.1.archive.immich.app"
},
{
"label": "v1.135.0",
"url": "https://v1.135.0.archive.immich.app"
},
{
"label": "v1.134.0",
"url": "https://v1.134.0.archive.immich.app"

View File

@@ -1 +1 @@
22.17.1
22.16.0

View File

@@ -3,6 +3,7 @@ name: immich-e2e
services:
immich-server:
container_name: immich-e2e-server
command: ['./start.sh']
image: immich-server:latest
build:
context: ../
@@ -35,10 +36,10 @@ services:
- 2285:2285
redis:
image: redis:6.2-alpine@sha256:7fe72c486b910f6b1a9769c937dad5d63648ddee82e056f47417542dd40825bb
image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa
database:
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:3aef84a0a4fabbda17ef115c3019ba0c914ec73e9f6e59203674322d858b8eea
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:9c704fb49ce27549df00f1b096cc93f8b0c959ef087507704d74954808f78a82
command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf
environment:
POSTGRES_PASSWORD: postgres

2038  e2e/package-lock.json generated: file diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.136.0",
"version": "1.134.0",
"description": "",
"main": "index.js",
"type": "module",
@@ -24,10 +24,9 @@
"@immich/cli": "file:../cli",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1",
"@socket.io/component-emitter": "^3.1.2",
"@types/luxon": "^3.4.2",
"@types/node": "^22.16.4",
"@types/oidc-provider": "^9.0.0",
"@types/node": "^22.15.21",
"@types/oidc-provider": "^8.5.1",
"@types/pg": "^8.15.1",
"@types/pngjs": "^6.0.4",
"@types/supertest": "^6.0.2",
@@ -35,17 +34,16 @@
"eslint": "^9.14.0",
"eslint-config-prettier": "^10.0.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^59.0.0",
"eslint-plugin-unicorn": "^57.0.0",
"exiftool-vendored": "^28.3.1",
"globals": "^16.0.0",
"jose": "^5.6.3",
"luxon": "^3.4.4",
"oidc-provider": "^9.0.0",
"oidc-provider": "^8.5.1",
"pg": "^8.11.3",
"pngjs": "^7.0.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"sharp": "^0.34.0",
"socket.io-client": "^4.7.4",
"supertest": "^7.0.0",
"typescript": "^5.3.3",
@@ -54,6 +52,6 @@
"vitest": "^3.0.0"
},
"volta": {
"node": "22.17.1"
"node": "22.16.0"
}
}

View File

@@ -7,7 +7,6 @@ import {
ReactionType,
createActivity as create,
createAlbum,
removeAssetFromAlbum,
} from '@immich/sdk';
import { createUserDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
@@ -343,36 +342,5 @@ describe('/activities', () => {
expect(status).toBe(204);
});
it('should return empty list when asset is removed', async () => {
const album3 = await createAlbum(
{
createAlbumDto: {
albumName: 'Album 3',
assetIds: [asset.id],
},
},
{ headers: asBearerAuth(admin.accessToken) },
);
await createActivity({ albumId: album3.id, assetId: asset.id, type: ReactionType.Like });
await removeAssetFromAlbum(
{
id: album3.id,
bulkIdsDto: {
ids: [asset.id],
},
},
{ headers: asBearerAuth(admin.accessToken) },
);
const { status, body } = await request(app)
.get('/activities')
.query({ albumId: album.id })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toEqual(200);
expect(body).toEqual([]);
});
});
});

View File

@@ -20,7 +20,7 @@ describe('/api-keys', () => {
});
beforeEach(async () => {
await utils.resetDatabase(['api_key']);
await utils.resetDatabase(['api_keys']);
});
describe('POST /api-keys', () => {

View File

@@ -15,7 +15,6 @@ import { DateTime } from 'luxon';
import { randomBytes } from 'node:crypto';
import { readFile, writeFile } from 'node:fs/promises';
import { basename, join } from 'node:path';
import sharp from 'sharp';
import { Socket } from 'socket.io-client';
import { createUserDto, uuidDto } from 'src/fixtures';
import { makeRandomImage } from 'src/generators';
@@ -41,40 +40,6 @@ const today = DateTime.fromObject({
}) as DateTime<true>;
const yesterday = today.minus({ days: 1 });
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
// Generate unique color to ensure different checksums for each image
const r = Math.floor(Math.random() * 256);
const g = Math.floor(Math.random() * 256);
const b = Math.floor(Math.random() * 256);
// Create a 100x100 solid color JPEG using Sharp
const imageBytes = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r, g, b },
},
})
.jpeg({ quality: 90 })
.toBuffer();
// Add random suffix to filename to avoid collisions
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
const filepath = join(tempDir, uniqueFilename);
await writeFile(filepath, imageBytes);
// Filter out undefined values before writing EXIF
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
await exiftool.write(filepath, cleanExifData);
// Re-read the image bytes after EXIF has been written
const finalImageBytes = await readFile(filepath);
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
};
describe('/asset', () => {
let admin: LoginResponseDto;
let websocket: Socket;
@@ -1225,411 +1190,6 @@ describe('/asset', () => {
});
});
describe('EXIF metadata extraction', () => {
describe('Additional date tag extraction', () => {
describe('Date-time vs time-only tag handling', () => {
it('should fall back to file timestamps when only time-only tags are available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
// Exclude all date-time tags to force fallback to file timestamps
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
GPSDateTime: undefined,
DateTimeUTC: undefined,
SonyDateTime2: undefined,
GPSDateStamp: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
it('should prefer DateTimeOriginal over time-only tags', async () => {
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use DateTimeOriginal, not TimeCreated
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-10-10T10:00:00.000Z').getTime(),
);
});
});
describe('GPSDateTime tag extraction', () => {
it('should extract GPSDateTime with GPS coordinates', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
GPSDateTime: '2023:11:15 12:30:00Z',
GPSLatitude: 37.7749,
GPSLongitude: -122.4194,
// Exclude other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-11-15T12:30:00.000Z').getTime(),
);
});
});
describe('CreateDate tag extraction', () => {
it('should extract CreateDate when available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
CreateDate: '2023:11:15 10:30:00',
// Exclude other higher priority date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
GPSDateTime: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-11-15T10:30:00.000Z').getTime(),
);
});
});
describe('GPSDateStamp tag extraction', () => {
it('should fall back to file timestamps when only date-only tags are available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
GPSLatitude: 51.5074,
GPSLongitude: -0.1278,
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
DateTimeOriginal: undefined,
CreateDate: undefined,
CreationDate: undefined,
GPSDateTime: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
});
/*
* NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
*
* NOT WRITABLE to JPEG:
* - MediaCreateDate: Can be read from video files but not written to JPEG
* - DateTimeCreated: Read-only tag in JPEG format
* - DateTimeUTC: Cannot be written to JPEG files
* - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
* - SubSecMediaCreateDate: Tag not defined for JPEG format
* - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
*
* WRITABLE but NOT READABLE from JPEG:
* - SubSecDateTimeOriginal: Can be written but not read back from JPEG
* - SubSecCreateDate: Can be written but not read back from JPEG
*
* EFFECTIVELY TESTABLE TAGS (writable and readable):
* - DateTimeOriginal ✓
* - CreateDate ✓
* - CreationDate ✓
* - GPSDateTime ✓
*
* The metadata service correctly handles non-readable tags and will fall back to
* file timestamps when only non-readable tags are present.
*/
describe('Date tag priority order', () => {
it('should respect the complete date tag priority order', async () => {
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
const testCases = [
{
name: 'DateTimeOriginal has highest priority among testable tags',
exifData: {
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
CreateDate: '2023:05:05 05:00:00', // TESTABLE
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-04-04T04:00:00.000Z',
},
{
name: 'CreateDate when DateTimeOriginal missing',
exifData: {
CreateDate: '2023:05:05 05:00:00', // TESTABLE
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-05-05T05:00:00.000Z',
},
{
name: 'CreationDate when standard EXIF tags missing',
exifData: {
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-07-07T07:00:00.000Z',
},
{
name: 'GPSDateTime when no other testable date tags present',
exifData: {
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
Make: 'SONY',
},
expectedDate: '2023-10-10T10:00:00.000Z',
},
];
for (const testCase of testCases) {
const { imageBytes, filename } = await createTestImageWithExif(
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
testCase.exifData,
);
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
expect(
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
`Date mismatch for: ${testCase.name}`,
).toBe(new Date(testCase.expectedDate).getTime());
}
});
});
describe('Edge cases for date tag handling', () => {
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
// Intentionally no GPSTimeStamp
// Exclude all other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
GPSDateTime: undefined,
DateTimeUTC: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
it('should handle all testable date tags present to verify complete priority order', async () => {
const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
// All TESTABLE date tags to JPEG format (writable AND readable)
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
CreateDate: '2023:05:05 05:00:00', // TESTABLE
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
// Note: Excluded non-testable tags:
// SubSec tags: writable but not readable from JPEG
// Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
// Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use DateTimeOriginal as it has the highest priority among testable tags
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-04-04T04:00:00.000Z').getTime(),
);
});
it('should use CreationDate when SubSec tags are missing', async () => {
const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
CreationDate: '2023:07:07 07:00:00', // WRITABLE
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
// Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
// Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
// Exclude SubSec and standard EXIF tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
CreateDate: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use CreationDate when available
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-07-07T07:00:00.000Z').getTime(),
);
});
it('should skip invalid date formats and use next valid tag', async () => {
const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
// Note: Testing invalid date handling with only WRITABLE tags
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
CreationDate: '2023:13:13 13:00:00', // WRITABLE - Invalid month (13), should be skipped
// Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
// Exclude other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
CreateDate: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should skip invalid dates and use the first valid one (GPSDateTime)
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-10-10T10:00:00.000Z').getTime(),
);
});
});
});
});
describe('POST /assets/exist', () => {
it('ignores invalid deviceAssetIds', async () => {
const response = await utils.checkExistingAssets(user1.accessToken, {

View File

@@ -0,0 +1,146 @@
import { LoginResponseDto, login, signUpAdmin } from '@immich/sdk';
import { loginDto, signupDto } from 'src/fixtures';
import { errorDto, loginResponseDto, signupResponseDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { beforeEach, describe, expect, it } from 'vitest';
const { email, password } = signupDto.admin;
describe(`/auth/admin-sign-up`, () => {
beforeEach(async () => {
await utils.resetDatabase();
});
describe('POST /auth/admin-sign-up', () => {
it(`should sign up the admin`, async () => {
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
expect(status).toBe(201);
expect(body).toEqual(signupResponseDto.admin);
});
it('should not allow a second admin to sign up', async () => {
await signUpAdmin({ signUpDto: signupDto.admin });
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
expect(status).toBe(400);
expect(body).toEqual(errorDto.alreadyHasAdmin);
});
});
});
describe('/auth/*', () => {
let admin: LoginResponseDto;
beforeEach(async () => {
await utils.resetDatabase();
await signUpAdmin({ signUpDto: signupDto.admin });
admin = await login({ loginCredentialDto: loginDto.admin });
});
describe(`POST /auth/login`, () => {
it('should reject an incorrect password', async () => {
const { status, body } = await request(app).post('/auth/login').send({ email, password: 'incorrect' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.incorrectLogin);
});
it('should accept a correct password', async () => {
const { status, body, headers } = await request(app).post('/auth/login').send({ email, password });
expect(status).toBe(201);
expect(body).toEqual(loginResponseDto.admin);
const token = body.accessToken;
expect(token).toBeDefined();
const cookies = headers['set-cookie'];
expect(cookies).toHaveLength(3);
expect(cookies[0].split(';').map((item) => item.trim())).toEqual([
`immich_access_token=${token}`,
'Max-Age=34560000',
'Path=/',
expect.stringContaining('Expires='),
'HttpOnly',
'SameSite=Lax',
]);
expect(cookies[1].split(';').map((item) => item.trim())).toEqual([
'immich_auth_type=password',
'Max-Age=34560000',
'Path=/',
expect.stringContaining('Expires='),
'HttpOnly',
'SameSite=Lax',
]);
expect(cookies[2].split(';').map((item) => item.trim())).toEqual([
'immich_is_authenticated=true',
'Max-Age=34560000',
'Path=/',
expect.stringContaining('Expires='),
'SameSite=Lax',
]);
});
});
describe('POST /auth/validateToken', () => {
it('should reject an invalid token', async () => {
const { status, body } = await request(app).post(`/auth/validateToken`).set('Authorization', 'Bearer 123');
expect(status).toBe(401);
expect(body).toEqual(errorDto.invalidToken);
});
it('should accept a valid token', async () => {
const { status, body } = await request(app)
.post(`/auth/validateToken`)
.send({})
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({ authStatus: true });
});
});
describe('POST /auth/change-password', () => {
it('should require the current password', async () => {
const { status, body } = await request(app)
.post(`/auth/change-password`)
.send({ password: 'wrong-password', newPassword: 'Password1234' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.wrongPassword);
});
it('should change the password', async () => {
const { status } = await request(app)
.post(`/auth/change-password`)
.send({ password, newPassword: 'Password1234' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
await login({
loginCredentialDto: {
email: 'admin@immich.cloud',
password: 'Password1234',
},
});
});
});
describe('POST /auth/logout', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post(`/auth/logout`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should logout the user', async () => {
const { status, body } = await request(app)
.post(`/auth/logout`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
successful: true,
redirectUri: '/auth/login?autoLaunch=0',
});
});
});
});

View File

@@ -6,7 +6,7 @@ import {
createMemory,
getMemory,
} from '@immich/sdk';
import { createUserDto } from 'src/fixtures';
import { createUserDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest';
@@ -17,6 +17,7 @@ describe('/memories', () => {
let user: LoginResponseDto;
let adminAsset: AssetMediaResponseDto;
let userAsset1: AssetMediaResponseDto;
let userAsset2: AssetMediaResponseDto;
let userMemory: MemoryResponseDto;
beforeAll(async () => {
@@ -24,9 +25,10 @@ describe('/memories', () => {
admin = await utils.adminSetup();
user = await utils.userSetup(admin.accessToken, createUserDto.user1);
[adminAsset, userAsset1] = await Promise.all([
[adminAsset, userAsset1, userAsset2] = await Promise.all([
utils.createAsset(admin.accessToken),
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken),
]);
userMemory = await createMemory(
{
@@ -41,7 +43,121 @@ describe('/memories', () => {
);
});
describe('GET /memories', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/memories');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
});
describe('POST /memories', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/memories');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should validate data when type is on this day', async () => {
const { status, body } = await request(app)
.post('/memories')
.set('Authorization', `Bearer ${user.accessToken}`)
.send({
type: 'on_this_day',
data: {},
memoryAt: new Date(2021).toISOString(),
});
expect(status).toBe(400);
expect(body).toEqual(
errorDto.badRequest(['data.year must be a positive number', 'data.year must be an integer number']),
);
});
it('should create a new memory', async () => {
const { status, body } = await request(app)
.post('/memories')
.set('Authorization', `Bearer ${user.accessToken}`)
.send({
type: 'on_this_day',
data: { year: 2021 },
memoryAt: new Date(2021).toISOString(),
});
expect(status).toBe(201);
expect(body).toEqual({
id: expect.any(String),
type: 'on_this_day',
data: { year: 2021 },
createdAt: expect.any(String),
updatedAt: expect.any(String),
isSaved: false,
memoryAt: expect.any(String),
ownerId: user.userId,
assets: [],
});
});
it('should create a new memory (with assets)', async () => {
const { status, body } = await request(app)
.post('/memories')
.set('Authorization', `Bearer ${user.accessToken}`)
.send({
type: 'on_this_day',
data: { year: 2021 },
memoryAt: new Date(2021).toISOString(),
assetIds: [userAsset1.id, userAsset2.id],
});
expect(status).toBe(201);
expect(body).toMatchObject({
id: expect.any(String),
assets: expect.arrayContaining([
expect.objectContaining({ id: userAsset1.id }),
expect.objectContaining({ id: userAsset2.id }),
]),
});
expect(body.assets).toHaveLength(2);
});
it('should create a new memory and ignore assets the user does not have access to', async () => {
const { status, body } = await request(app)
.post('/memories')
.set('Authorization', `Bearer ${user.accessToken}`)
.send({
type: 'on_this_day',
data: { year: 2021 },
memoryAt: new Date(2021).toISOString(),
assetIds: [userAsset1.id, adminAsset.id],
});
expect(status).toBe(201);
expect(body).toMatchObject({
id: expect.any(String),
assets: [expect.objectContaining({ id: userAsset1.id })],
});
expect(body.assets).toHaveLength(1);
});
});
describe('GET /memories/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/memories/${uuidDto.invalid}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid id', async () => {
const { status, body } = await request(app)
.get(`/memories/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should require access', async () => {
const { status, body } = await request(app)
.get(`/memories/${userMemory.id}`)
@@ -60,6 +176,22 @@ describe('/memories', () => {
});
describe('PUT /memories/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/memories/${uuidDto.invalid}`).send({ isSaved: true });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid id', async () => {
const { status, body } = await request(app)
.put(`/memories/${uuidDto.invalid}`)
.send({ isSaved: true })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should require access', async () => {
const { status, body } = await request(app)
.put(`/memories/${userMemory.id}`)
@@ -86,6 +218,23 @@ describe('/memories', () => {
});
describe('PUT /memories/:id/assets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.put(`/memories/${userMemory.id}/assets`)
.send({ ids: [userAsset1.id] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid id', async () => {
const { status, body } = await request(app)
.put(`/memories/${uuidDto.invalid}/assets`)
.send({ ids: [userAsset1.id] })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should require access', async () => {
const { status, body } = await request(app)
.put(`/memories/${userMemory.id}/assets`)
@@ -95,6 +244,15 @@ describe('/memories', () => {
expect(body).toEqual(errorDto.noPermission);
});
it('should require a valid asset id', async () => {
const { status, body } = await request(app)
.put(`/memories/${userMemory.id}/assets`)
.send({ ids: [uuidDto.invalid] })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
});
it('should require asset access', async () => {
const { status, body } = await request(app)
.put(`/memories/${userMemory.id}/assets`)
@@ -121,6 +279,23 @@ describe('/memories', () => {
});
describe('DELETE /memories/:id/assets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.delete(`/memories/${userMemory.id}/assets`)
.send({ ids: [userAsset1.id] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid id', async () => {
const { status, body } = await request(app)
.delete(`/memories/${uuidDto.invalid}/assets`)
.send({ ids: [userAsset1.id] })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should require access', async () => {
const { status, body } = await request(app)
.delete(`/memories/${userMemory.id}/assets`)
@@ -130,6 +305,15 @@ describe('/memories', () => {
expect(body).toEqual(errorDto.noPermission);
});
it('should require a valid asset id', async () => {
const { status, body } = await request(app)
.delete(`/memories/${userMemory.id}/assets`)
.send({ ids: [uuidDto.invalid] })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
});
it('should only remove assets in the memory', async () => {
const { status, body } = await request(app)
.delete(`/memories/${userMemory.id}/assets`)
@@ -156,6 +340,21 @@ describe('/memories', () => {
});
describe('DELETE /memories/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).delete(`/memories/${uuidDto.invalid}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid id', async () => {
const { status, body } = await request(app)
.delete(`/memories/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should require access', async () => {
const { status, body } = await request(app)
.delete(`/memories/${userMemory.id}`)

View File

@@ -227,21 +227,6 @@ describe(`/oauth`, () => {
expect(user.storageLabel).toBe('user-username');
});
it('should set the admin status from a role claim', async () => {
const callbackParams = await loginWithOAuth(OAuthUser.WITH_ROLE);
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
userId: expect.any(String),
userEmail: 'oauth-with-role@immich.app',
isAdmin: true,
});
const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
expect(user.isAdmin).toBe(true);
});
it('should work with RS256 signed tokens', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,

View File

@@ -11,32 +11,11 @@ describe('/people', () => {
let hiddenPerson: PersonResponseDto;
let multipleAssetsPerson: PersonResponseDto;
let nameAlicePerson: PersonResponseDto;
let nameBobPerson: PersonResponseDto;
let nameCharliePerson: PersonResponseDto;
let nameNullPerson4Assets: PersonResponseDto;
let nameNullPerson3Assets: PersonResponseDto;
let nameNullPerson1Asset: PersonResponseDto;
let nameBillPersonFavourite: PersonResponseDto;
let nameFreddyPersonFavourite: PersonResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
[
visiblePerson,
hiddenPerson,
multipleAssetsPerson,
nameCharliePerson,
nameBobPerson,
nameAlicePerson,
nameNullPerson4Assets,
nameNullPerson3Assets,
nameNullPerson1Asset,
nameBillPersonFavourite,
nameFreddyPersonFavourite,
] = await Promise.all([
[visiblePerson, hiddenPerson, multipleAssetsPerson] = await Promise.all([
utils.createPerson(admin.accessToken, {
name: 'visible_person',
}),
@@ -47,39 +26,10 @@ describe('/people', () => {
utils.createPerson(admin.accessToken, {
name: 'multiple_assets_person',
}),
// --- Setup for the specific sorting test ---
utils.createPerson(admin.accessToken, {
name: 'Charlie',
}),
utils.createPerson(admin.accessToken, {
name: 'Bob',
}),
utils.createPerson(admin.accessToken, {
name: 'Alice',
}),
utils.createPerson(admin.accessToken, {
name: '',
}),
utils.createPerson(admin.accessToken, {
name: '',
}),
utils.createPerson(admin.accessToken, {
name: '',
}),
utils.createPerson(admin.accessToken, {
name: 'Bill',
isFavorite: true,
}),
utils.createPerson(admin.accessToken, {
name: 'Freddy',
isFavorite: true,
}),
]);
const asset1 = await utils.createAsset(admin.accessToken);
const asset2 = await utils.createAsset(admin.accessToken);
const asset3 = await utils.createAsset(admin.accessToken);
const asset4 = await utils.createAsset(admin.accessToken);
await Promise.all([
utils.createFace({ assetId: asset1.id, personId: visiblePerson.id }),
@@ -87,27 +37,6 @@ describe('/people', () => {
utils.createFace({ assetId: asset1.id, personId: multipleAssetsPerson.id }),
utils.createFace({ assetId: asset1.id, personId: multipleAssetsPerson.id }),
utils.createFace({ assetId: asset2.id, personId: multipleAssetsPerson.id }),
utils.createFace({ assetId: asset3.id, personId: multipleAssetsPerson.id }), // 4 assets
// Named persons
utils.createFace({ assetId: asset1.id, personId: nameCharliePerson.id }), // 1 asset
utils.createFace({ assetId: asset1.id, personId: nameBobPerson.id }),
utils.createFace({ assetId: asset2.id, personId: nameBobPerson.id }), // 2 assets
utils.createFace({ assetId: asset1.id, personId: nameAlicePerson.id }), // 1 asset
// Null-named person 4 assets
utils.createFace({ assetId: asset1.id, personId: nameNullPerson4Assets.id }),
utils.createFace({ assetId: asset2.id, personId: nameNullPerson4Assets.id }),
utils.createFace({ assetId: asset3.id, personId: nameNullPerson4Assets.id }),
utils.createFace({ assetId: asset4.id, personId: nameNullPerson4Assets.id }), // 4 assets
// Null-named person 3 assets
utils.createFace({ assetId: asset1.id, personId: nameNullPerson3Assets.id }),
utils.createFace({ assetId: asset2.id, personId: nameNullPerson3Assets.id }),
utils.createFace({ assetId: asset3.id, personId: nameNullPerson3Assets.id }), // 3 assets
// Null-named person 1 asset
utils.createFace({ assetId: asset3.id, personId: nameNullPerson1Asset.id }),
// Favourite People
utils.createFace({ assetId: asset1.id, personId: nameFreddyPersonFavourite.id }),
utils.createFace({ assetId: asset2.id, personId: nameFreddyPersonFavourite.id }),
utils.createFace({ assetId: asset1.id, personId: nameBillPersonFavourite.id }),
]);
});
@@ -122,66 +51,27 @@ describe('/people', () => {
expect(status).toBe(200);
expect(body).toEqual({
hasNextPage: false,
total: 11,
total: 3,
hidden: 1,
people: [
expect.objectContaining({ name: 'Freddy' }),
expect.objectContaining({ name: 'Bill' }),
expect.objectContaining({ name: 'multiple_assets_person' }),
expect.objectContaining({ name: 'Bob' }),
expect.objectContaining({ name: 'Alice' }),
expect.objectContaining({ name: 'Charlie' }),
expect.objectContaining({ name: 'visible_person' }),
expect.objectContaining({ id: nameNullPerson4Assets.id, name: '' }),
expect.objectContaining({ id: nameNullPerson3Assets.id, name: '' }),
expect.objectContaining({ name: 'hidden_person' }), // Should really be before the null names
expect.objectContaining({ name: 'hidden_person' }),
],
});
});
it('should sort visible people by asset count (desc), then by name (asc, nulls last)', async () => {
const { status, body } = await request(app).get('/people').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body.hasNextPage).toBe(false);
expect(body.total).toBe(11); // All persons
expect(body.hidden).toBe(1); // 'hidden_person'
const people = body.people as PersonResponseDto[];
expect(people.map((p) => p.id)).toEqual([
nameFreddyPersonFavourite.id, // name: 'Freddy', count: 2
nameBillPersonFavourite.id, // name: 'Bill', count: 1
multipleAssetsPerson.id, // name: 'multiple_assets_person', count: 3
nameBobPerson.id, // name: 'Bob', count: 2
nameAlicePerson.id, // name: 'Alice', count: 1
nameCharliePerson.id, // name: 'Charlie', count: 1
visiblePerson.id, // name: 'visible_person', count: 1
nameNullPerson4Assets.id, // name: '', count: 4
nameNullPerson3Assets.id, // name: '', count: 3
]);
expect(people.some((p) => p.id === hiddenPerson.id)).toBe(false);
});
it('should return only visible people', async () => {
const { status, body } = await request(app).get('/people').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
hasNextPage: false,
total: 11,
total: 3,
hidden: 1,
people: [
expect.objectContaining({ name: 'Freddy' }),
expect.objectContaining({ name: 'Bill' }),
expect.objectContaining({ name: 'multiple_assets_person' }),
expect.objectContaining({ name: 'Bob' }),
expect.objectContaining({ name: 'Alice' }),
expect.objectContaining({ name: 'Charlie' }),
expect.objectContaining({ name: 'visible_person' }),
expect.objectContaining({ id: nameNullPerson4Assets.id, name: '' }),
expect.objectContaining({ id: nameNullPerson3Assets.id, name: '' }),
],
});
});
@@ -190,14 +80,14 @@ describe('/people', () => {
const { status, body } = await request(app)
.get('/people')
.set('Authorization', `Bearer ${admin.accessToken}`)
.query({ withHidden: true, page: 5, size: 1 });
.query({ withHidden: true, page: 2, size: 1 });
expect(status).toBe(200);
expect(body).toEqual({
hasNextPage: true,
total: 11,
total: 3,
hidden: 1,
people: [expect.objectContaining({ name: 'Alice' })],
people: [expect.objectContaining({ name: 'visible_person' })],
});
});
});
@@ -238,7 +128,7 @@ describe('/people', () => {
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ assets: 3 }));
expect(body).toEqual(expect.objectContaining({ assets: 2 }));
});
});

View File

@@ -117,25 +117,8 @@ describe('/shared-links', () => {
const resp = await request(shareUrl).get(`/${linkWithAssets.key}`);
expect(resp.status).toBe(200);
expect(resp.header['content-type']).toContain('text/html');
expect(resp.text).toContain(`<meta property="og:image" content="http://127.0.0.1:2285`);
});
it('should fall back to my.immich.app og:image meta tag for shared asset if Host header is not present', async () => {
const resp = await request(shareUrl).get(`/${linkWithAssets.key}`).set('Host', '');
expect(resp.status).toBe(200);
expect(resp.header['content-type']).toContain('text/html');
expect(resp.text).toContain(`<meta property="og:image" content="https://my.immich.app`);
});
it('should return 404 for an invalid shared link', async () => {
const resp = await request(shareUrl).get(`/invalid-key`);
expect(resp.status).toBe(404);
expect(resp.header['content-type']).toContain('text/html');
expect(resp.text).not.toContain(`og:type`);
expect(resp.text).not.toContain(`og:title`);
expect(resp.text).not.toContain(`og:description`);
expect(resp.text).not.toContain(`og:image`);
});
});
describe('GET /shared-links', () => {

View File

@@ -15,6 +15,12 @@ describe('/system-config', () => {
});
describe('PUT /system-config', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put('/system-config');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should always return the new config', async () => {
const config = await getSystemConfig(admin.accessToken);

View File

@@ -37,7 +37,7 @@ describe('/tags', () => {
beforeEach(async () => {
// tagging assets eventually triggers metadata extraction which can impact other tests
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
await utils.resetDatabase(['tag']);
await utils.resetDatabase(['tags']);
});
describe('POST /tags', () => {

View File

@@ -0,0 +1,230 @@
import {
AssetMediaResponseDto,
AssetVisibility,
LoginResponseDto,
SharedLinkType,
TimeBucketAssetResponseDto,
} from '@immich/sdk';
import { DateTime } from 'luxon';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';
// TODO this should probably be a test util function
const today = DateTime.fromObject({
year: 2023,
month: 11,
day: 3,
}) as DateTime<true>;
const yesterday = today.minus({ days: 1 });
describe('/timeline', () => {
let admin: LoginResponseDto;
let user: LoginResponseDto;
let timeBucketUser: LoginResponseDto;
let user1Assets: AssetMediaResponseDto[];
let user2Assets: AssetMediaResponseDto[];
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup({ onboarding: false });
[user, timeBucketUser] = await Promise.all([
utils.userSetup(admin.accessToken, createUserDto.create('1')),
utils.userSetup(admin.accessToken, createUserDto.create('time-bucket')),
]);
user1Assets = await Promise.all([
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken, {
isFavorite: true,
fileCreatedAt: yesterday.toISO(),
fileModifiedAt: yesterday.toISO(),
assetData: { filename: 'example.mp4' },
}),
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken),
]);
user2Assets = await Promise.all([
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-01-01').toISOString() }),
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-10').toISOString() }),
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-12').toISOString() }),
]);
await utils.deleteAssets(timeBucketUser.accessToken, [user2Assets[4].id]);
});
describe('GET /timeline/buckets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/timeline/buckets');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should get time buckets by month', async () => {
const { status, body } = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(
expect.arrayContaining([
{ count: 3, timeBucket: '1970-02-01' },
{ count: 1, timeBucket: '1970-01-01' },
]),
);
});
it('should not allow access for unrelated shared links', async () => {
const sharedLink = await utils.createSharedLink(user.accessToken, {
type: SharedLinkType.Individual,
assetIds: user1Assets.map(({ id }) => id),
});
const { status, body } = await request(app).get('/timeline/buckets').query({ key: sharedLink.key });
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should return error if time bucket is requested with partners asset and archived', async () => {
const req1 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ withPartners: true, visibility: AssetVisibility.Archive });
expect(req1.status).toBe(400);
expect(req1.body).toEqual(errorDto.badRequest());
const req2 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${user.accessToken}`)
.query({ withPartners: true, visibility: undefined });
expect(req2.status).toBe(400);
expect(req2.body).toEqual(errorDto.badRequest());
});
it('should return error if time bucket is requested with partners asset and favorite', async () => {
const req1 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ withPartners: true, isFavorite: true });
expect(req1.status).toBe(400);
expect(req1.body).toEqual(errorDto.badRequest());
const req2 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ withPartners: true, isFavorite: false });
expect(req2.status).toBe(400);
expect(req2.body).toEqual(errorDto.badRequest());
});
it('should return error if time bucket is requested with partners asset and trash', async () => {
const req = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${user.accessToken}`)
.query({ withPartners: true, isTrashed: true });
expect(req.status).toBe(400);
expect(req.body).toEqual(errorDto.badRequest());
});
});
describe('GET /timeline/bucket', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/timeline/bucket').query({
timeBucket: '1900-01-01',
});
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should handle 5 digit years', async () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.query({ timeBucket: '012345-01-01' })
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
city: [],
country: [],
duration: [],
id: [],
visibility: [],
isFavorite: [],
isImage: [],
isTrashed: [],
livePhotoVideoId: [],
fileCreatedAt: [],
localOffsetHours: [],
ownerId: [],
projectionType: [],
ratio: [],
status: [],
thumbhash: [],
});
});
// TODO enable date string validation while still accepting 5 digit years
// it('should fail if time bucket is invalid', async () => {
// const { status, body } = await request(app)
// .get('/timeline/bucket')
// .set('Authorization', `Bearer ${user.accessToken}`)
// .query({ timeBucket: 'foo' });
// expect(status).toBe(400);
// expect(body).toEqual(errorDto.badRequest);
// });
it('should return time bucket', async () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ timeBucket: '1970-02-10' });
expect(status).toBe(200);
expect(body).toEqual({
city: [],
country: [],
duration: [],
id: [],
visibility: [],
isFavorite: [],
isImage: [],
isTrashed: [],
livePhotoVideoId: [],
fileCreatedAt: [],
localOffsetHours: [],
ownerId: [],
projectionType: [],
ratio: [],
status: [],
thumbhash: [],
});
});
it('should return time bucket in trash', async () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ timeBucket: '1970-02-01T00:00:00.000Z', isTrashed: true });
expect(status).toBe(200);
const timeBucket: TimeBucketAssetResponseDto = body;
expect(timeBucket.isTrashed).toEqual([true]);
});
});
});

View File

@@ -118,7 +118,7 @@ describe('/admin/users', () => {
});
}
it('should accept `isAdmin`', async () => {
it('should ignore `isAdmin`', async () => {
const { status, body } = await request(app)
.post(`/admin/users`)
.send({
@@ -130,7 +130,7 @@ describe('/admin/users', () => {
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toMatchObject({
email: 'user5@immich.cloud',
isAdmin: true,
isAdmin: false,
shouldChangePassword: true,
});
expect(status).toBe(201);
@@ -163,15 +163,14 @@ describe('/admin/users', () => {
});
}
it('should allow a non-admin to become an admin', async () => {
const user = await utils.userSetup(admin.accessToken, createUserDto.create('admin2'));
it('should not allow a non-admin to become an admin', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${user.userId}`)
.put(`/admin/users/${nonAdmin.userId}`)
.send({ isAdmin: true })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ isAdmin: true });
expect(body).toMatchObject({ isAdmin: false });
});
it('ignores updates to profileImagePath', async () => {

View File

@@ -97,7 +97,7 @@ describe(`immich upload`, () => {
});
beforeEach(async () => {
await utils.resetDatabase(['asset', 'album']);
await utils.resetDatabase(['assets', 'albums']);
});
describe(`immich upload /path/to/file.jpg`, () => {

View File

@@ -1,178 +0,0 @@
#!/usr/bin/env node
/**
* Script to generate test images with additional EXIF date tags
* This creates actual JPEG images with embedded metadata for testing
* Images are generated into e2e/test-assets/metadata/dates/
*/
import { execSync } from 'node:child_process';
import { writeFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import sharp from 'sharp';
interface TestImage {
filename: string;
description: string;
exifTags: Record<string, string>;
}
const testImages: TestImage[] = [
{
filename: 'time-created.jpg',
description: 'Image with TimeCreated tag',
exifTags: {
TimeCreated: '2023:11:15 14:30:00',
Make: 'Canon',
Model: 'EOS R5',
},
},
{
filename: 'gps-datetime.jpg',
description: 'Image with GPSDateTime and coordinates',
exifTags: {
GPSDateTime: '2023:11:15 12:30:00Z',
GPSLatitude: '37.7749',
GPSLongitude: '-122.4194',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
{
filename: 'datetime-utc.jpg',
description: 'Image with DateTimeUTC tag',
exifTags: {
DateTimeUTC: '2023:11:15 10:30:00',
Make: 'Nikon',
Model: 'D850',
},
},
{
filename: 'gps-datestamp.jpg',
description: 'Image with GPSDateStamp and GPSTimeStamp',
exifTags: {
GPSDateStamp: '2023:11:15',
GPSTimeStamp: '08:30:00',
GPSLatitude: '51.5074',
GPSLongitude: '-0.1278',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
{
filename: 'sony-datetime2.jpg',
description: 'Sony camera image with SonyDateTime2 tag',
exifTags: {
SonyDateTime2: '2023:11:15 06:30:00',
Make: 'SONY',
Model: 'ILCE-7RM5',
},
},
{
filename: 'date-priority-test.jpg',
description: 'Image with multiple date tags to test priority',
exifTags: {
SubSecDateTimeOriginal: '2023:01:01 01:00:00',
DateTimeOriginal: '2023:02:02 02:00:00',
SubSecCreateDate: '2023:03:03 03:00:00',
CreateDate: '2023:04:04 04:00:00',
CreationDate: '2023:05:05 05:00:00',
DateTimeCreated: '2023:06:06 06:00:00',
TimeCreated: '2023:07:07 07:00:00',
GPSDateTime: '2023:08:08 08:00:00',
DateTimeUTC: '2023:09:09 09:00:00',
GPSDateStamp: '2023:10:10',
SonyDateTime2: '2023:11:11 11:00:00',
},
},
{
filename: 'new-tags-only.jpg',
description: 'Image with only additional date tags (no standard tags)',
exifTags: {
TimeCreated: '2023:12:01 15:45:30',
GPSDateTime: '2023:12:01 13:45:30Z',
DateTimeUTC: '2023:12:01 13:45:30',
GPSDateStamp: '2023:12:01',
SonyDateTime2: '2023:12:01 08:45:30',
GPSLatitude: '40.7128',
GPSLongitude: '-74.0060',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
];
const generateTestImages = async (): Promise<void> => {
// Target directory: e2e/test-assets/metadata/dates/
// Current file is in: e2e/src/
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');
console.log('Generating test images with additional EXIF date tags...');
console.log(`Target directory: ${targetDir}`);
for (const image of testImages) {
try {
const imagePath = join(targetDir, image.filename);
// Create unique JPEG file using Sharp
const r = Math.floor(Math.random() * 256);
const g = Math.floor(Math.random() * 256);
const b = Math.floor(Math.random() * 256);
const jpegData = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r, g, b },
},
})
.jpeg({ quality: 90 })
.toBuffer();
writeFileSync(imagePath, jpegData);
// Build exiftool command to add EXIF data
const exifArgs = Object.entries(image.exifTags)
.map(([tag, value]) => `-${tag}="${value}"`)
.join(' ');
const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;
console.log(`Creating ${image.filename}: ${image.description}`);
execSync(command, { stdio: 'pipe' });
// Verify the tags were written
const verifyCommand = `exiftool -json "${imagePath}"`;
const result = execSync(verifyCommand, { encoding: 'utf8' });
const metadata = JSON.parse(result)[0];
console.log(` ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);
// Log first date tag found for verification
const firstDateTag = Object.keys(image.exifTags).find(
(tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
);
if (firstDateTag && metadata[firstDateTag]) {
console.log(` ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
}
} catch (error) {
console.error(`Failed to create ${image.filename}:`, (error as Error).message);
}
}
console.log('\nTest image generation complete!');
console.log('Files created in:', targetDir);
console.log('\nTo test these images:');
console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
};
export { generateTestImages };
// Run the generator if this file is executed directly
if (import.meta.url === `file://${process.argv[1]}`) {
generateTestImages().catch(console.error);
}

View File

@@ -7,44 +7,6 @@ describe(`immich-admin`, () => {
await utils.adminSetup();
});
describe('revoke-admin', () => {
it('should revoke admin privileges from a user', async () => {
const { child, promise } = immichAdmin(['revoke-admin']);
let data = '';
child.stdout.on('data', (chunk) => {
data += chunk;
if (data.includes('Please enter the user email:')) {
child.stdin.end('admin@immich.cloud\n');
}
});
const { stdout, exitCode } = await promise;
expect(exitCode).toBe(0);
expect(stdout).toContain('Admin access has been revoked from');
});
});
describe('grant-admin', () => {
it('should grant admin privileges to a user', async () => {
const { child, promise } = immichAdmin(['grant-admin']);
let data = '';
child.stdout.on('data', (chunk) => {
data += chunk;
if (data.includes('Please enter the user email:')) {
child.stdin.end('admin@immich.cloud\n');
}
});
const { stdout, exitCode } = await promise;
expect(exitCode).toBe(0);
expect(stdout).toContain('Admin access has been granted to');
});
});
describe('list-users', () => {
it('should list the admin user', async () => {
const { stdout, exitCode } = await immichAdmin(['list-users']).promise;

View File

@@ -116,7 +116,6 @@ export const deviceDto = {
createdAt: expect.any(String),
updatedAt: expect.any(String),
current: true,
isPendingSyncReset: false,
deviceOS: '',
deviceType: '',
},

View File

@@ -12,7 +12,6 @@ export enum OAuthUser {
NO_NAME = 'no-name',
WITH_QUOTA = 'with-quota',
WITH_USERNAME = 'with-username',
WITH_ROLE = 'with-role',
}
const claims = [
@@ -35,12 +34,6 @@ const claims = [
preferred_username: 'user-quota',
immich_quota: 25,
},
{
sub: OAuthUser.WITH_ROLE,
email: 'oauth-with-role@immich.app',
email_verified: true,
immich_role: 'admin',
},
];
const withDefaultClaims = (sub: string) => ({
@@ -71,15 +64,7 @@ const setup = async () => {
claims: {
openid: ['sub'],
email: ['email', 'email_verified'],
profile: [
'name',
'given_name',
'family_name',
'preferred_username',
'immich_quota',
'immich_username',
'immich_role',
],
profile: ['name', 'given_name', 'family_name', 'preferred_username', 'immich_quota', 'immich_username'],
},
features: {
jwtUserinfo: {

View File

@@ -60,7 +60,6 @@ import { io, type Socket } from 'socket.io-client';
import { loginDto, signupDto } from 'src/fixtures';
import { makeRandomImage } from 'src/generators';
import request from 'supertest';
export type { Emitter } from '@socket.io/component-emitter';
type CommandResponse = { stdout: string; stderr: string; exitCode: number | null };
type EventType = 'assetUpload' | 'assetUpdate' | 'assetDelete' | 'userDelete' | 'assetHidden';
@@ -85,10 +84,10 @@ export const immichAdmin = (args: string[]) =>
export const specialCharStrings = ["'", '"', ',', '{', '}', '*'];
export const TEN_TIMES = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
const executeCommand = (command: string, args: string[], options?: { cwd?: string }) => {
const executeCommand = (command: string, args: string[]) => {
let _resolve: (value: CommandResponse) => void;
const promise = new Promise<CommandResponse>((resolve) => (_resolve = resolve));
const child = spawn(command, args, { stdio: 'pipe', cwd: options?.cwd });
const child = spawn(command, args, { stdio: 'pipe' });
let stdout = '';
let stderr = '';
@@ -154,19 +153,19 @@ export const utils = {
tables = tables || [
// TODO e2e test for deleting a stack, since it is quite complex
'stack',
'library',
'shared_link',
'asset_stack',
'libraries',
'shared_links',
'person',
'album',
'asset',
'asset_face',
'albums',
'assets',
'asset_faces',
'activity',
'api_key',
'session',
'user',
'api_keys',
'sessions',
'users',
'system_metadata',
'tag',
'tags',
];
const sql: string[] = [];
@@ -175,7 +174,7 @@ export const utils = {
if (table === 'system_metadata') {
sql.push(`DELETE FROM "system_metadata" where "key" NOT IN ('reverse-geocoding-state', 'system-flags');`);
} else {
sql.push(`DELETE FROM "${table}" CASCADE;`);
sql.push(`DELETE FROM ${table} CASCADE;`);
}
}
@@ -451,7 +450,7 @@ export const utils = {
return;
}
await client.query('INSERT INTO asset_face ("assetId", "personId") VALUES ($1, $2)', [assetId, personId]);
await client.query('INSERT INTO asset_faces ("assetId", "personId") VALUES ($1, $2)', [assetId, personId]);
},
setPersonThumbnail: async (personId: string) => {

View File

@@ -1,89 +0,0 @@
import { getUserAdmin } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import { asBearerAuth, utils } from 'src/utils';
test.describe('User Administration', () => {
test.beforeAll(() => {
utils.initSdk();
});
test.beforeEach(async () => {
await utils.resetDatabase();
});
test('validate admin/users link', async ({ context, page }) => {
const admin = await utils.adminSetup();
await utils.setAuthCookies(context, admin.accessToken);
// Navigate to user management page and verify title and header
await page.goto(`/admin/users`);
await expect(page).toHaveTitle(/User Management/);
await expect(page.getByText('User Management')).toBeVisible();
});
test('create user', async ({ context, page }) => {
const admin = await utils.adminSetup();
await utils.setAuthCookies(context, admin.accessToken);
// Create a new user
await page.goto('/admin/users');
await page.getByRole('button', { name: 'Create user' }).click();
await page.getByLabel('Email').fill('user@immich.cloud');
await page.getByLabel('Password', { exact: true }).fill('password');
await page.getByLabel('Confirm Password').fill('password');
await page.getByLabel('Name').fill('Immich User');
await page.getByRole('button', { name: 'Create', exact: true }).click();
// Verify the user exists in the user list
await page.getByRole('row', { name: 'user@immich.cloud' });
});
test('promote to admin', async ({ context, page }) => {
const admin = await utils.adminSetup();
await utils.setAuthCookies(context, admin.accessToken);
const user = await utils.userSetup(admin.accessToken, {
name: 'Admin 2',
email: 'admin2@immich.cloud',
password: 'password',
});
expect(user.isAdmin).toBe(false);
await page.goto(`/admin/users/${user.userId}`);
await page.getByRole('button', { name: 'Edit user' }).click();
await expect(page.getByLabel('Admin User')).not.toBeChecked();
await page.getByText('Admin User').click();
await expect(page.getByLabel('Admin User')).toBeChecked();
await page.getByRole('button', { name: 'Confirm' }).click();
const updated = await getUserAdmin({ id: user.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(updated.isAdmin).toBe(true);
});
test('revoke admin access', async ({ context, page }) => {
const admin = await utils.adminSetup();
await utils.setAuthCookies(context, admin.accessToken);
const user = await utils.userSetup(admin.accessToken, {
name: 'Admin 2',
email: 'admin2@immich.cloud',
password: 'password',
isAdmin: true,
});
expect(user.isAdmin).toBe(true);
await page.goto(`/admin/users/${user.userId}`);
await page.getByRole('button', { name: 'Edit user' }).click();
await expect(page.getByLabel('Admin User')).toBeChecked();
await page.getByText('Admin User').click();
await expect(page.getByLabel('Admin User')).not.toBeChecked();
await page.getByRole('button', { name: 'Confirm' }).click();
const updated = await getUserAdmin({ id: user.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(updated.isAdmin).toBe(false);
});
});

View File

@@ -4,7 +4,6 @@
"account_settings": "Rekeninginstellings",
"acknowledge": "Erken",
"action": "Aksie",
"action_common_update": "Opdateur",
"actions": "Aksies",
"active": "Aktief",
"activity": "Aktiwiteite",
@@ -14,7 +13,6 @@
"add_a_location": "Voeg 'n ligging by",
"add_a_name": "Voeg 'n naam by",
"add_a_title": "Voeg 'n titel by",
"add_endpoint": "Voeg Koppelvlakpunt by",
"add_exclusion_pattern": "Voeg uitsgluitingspatrone by",
"add_import_path": "Voeg invoerpad by",
"add_location": "Voeg ligging by",
@@ -22,30 +20,27 @@
"add_partner": "Voeg vennoot by",
"add_path": "Voeg pad by",
"add_photos": "Voeg foto's by",
"add_tag": "Voeg tag by",
"add_to": "Voeg by…",
"add_to_album": "Voeg na album",
"add_to_album_bottom_sheet_added": "By {album} bygevoeg",
"add_to_album_bottom_sheet_already_exists": "Reeds in {album}",
"add_to_shared_album": "Voeg toe aan gedeelde album",
"add_to_shared_album": "Voeg na gedeelde album",
"add_url": "Voeg URL by",
"added_to_archive": "By argief toegevoegd",
"added_to_favorites": "By gunstelinge toegevoegd",
"added_to_favorites_count": "Het {count, number} by gunstelinge toegevoegd",
"added_to_archive": "By argief gevoeg",
"added_to_favorites": "By gunstelinge gevoeg",
"added_to_favorites_count": "Het {count, number} by gunstelinge gevoeg",
"admin": {
"add_exclusion_pattern_description": "Voeg uitsluitingspatrone by. Globbing met *, ** en ? word ondersteun. Om alle lêers in enige lêergids genaamd \"Raw\" te ignoreer, gebruik \"**/Raw/**\". Om alle lêers wat op \".tif\" eindig, te ignoreer, gebruik \"**/*.tif\". Om 'n absolute pad te ignoreer, gebruik \"/path/to/ignore/**\".",
"admin_user": "Admin gebruiker",
"asset_offline_description": "Hierdie eksterne biblioteekbate word nie meer op skyf gevind nie en is na die asblik geskuif. As die lêer binne die biblioteek geskuif is, gaan jou tydlyn na vir die nuwe ooreenstemmende bate. Om hierdie bate te herstel, maak asseblief seker dat die lêerpad hieronder deur Immich verkry kan word en skandeer die biblioteek.",
"authentication_settings": "Verifikasie instellings",
"authentication_settings_description": "Bestuur wagwoord, OAuth en ander verifikasie instellings",
"authentication_settings_disable_all": "Is jy seker jy wil alle aanmeldmetodes deaktiveer? Aanmelding sal heeltemal gedeaktiveer word.",
"authentication_settings_reenable": "Om te heraktiveer, gebruik 'n <link>Server Command</link>.",
"background_task_job": "Agtergrondtake",
"backup_database": "Skep Datastortlêer",
"backup_database": "Rugsteun databasis",
"backup_database_enable_description": "Aktiveer databasisrugsteun",
"backup_keep_last_amount": "Aantal vorige rugsteune om te hou",
"backup_settings": "Rugsteun instellings",
"backup_settings_description": "Bestuur databasis rugsteun instellings.",
"backup_settings_description": "Bestuur databasis rugsteun instellings",
"check_all": "Kies Alles",
"cleared_jobs": "Poste gevee vir: {job}",
"config_set_by_file": "Config word tans deur 'n konfigurasielêer gestel",
"confirm_delete_library": "Is jy seker jy wil {library}-biblioteek uitvee?",
@@ -53,7 +48,6 @@
"confirm_email_below": "Om te bevestig, tik \"{email}\" hieronder",
"confirm_reprocess_all_faces": "Is jy seker jy wil alle gesigte herverwerk? Dit sal ook genoemde mense skoonmaak.",
"confirm_user_password_reset": "Is jy seker jy wil {user} se wagwoord terugstel?",
"confirm_user_pin_code_reset": "Is jy seker jy wil {user} se PIN kode herstel?",
"create_job": "Skep werk",
"cron_expression": "Cron uitdrukking",
"cron_expression_description": "Stel die skanderingsinterval in met die cron-formaat. Vir meer inligting verwys asseblief na bv. <link>Crontab Guru</link>",
@@ -61,16 +55,14 @@
"disable_login": "Deaktiveer aanmelding",
"duplicate_detection_job_description": "Begin masjienleer op bates om soortgelyke beelde op te spoor. Maak staat op Smart Search",
"exclusion_pattern_description": "Met uitsluitingspatrone kan jy lêers en vouers ignoreer wanneer jy jou biblioteek skandeer. Dit is nuttig as jy vouers het wat lêers bevat wat jy nie wil invoer nie, soos RAW-lêers.",
"external_library_created_at": "Eksterne biblioteek (geskep op {date})",
"external_library_management": "Eksterne Biblioteekbestuur",
"face_detection": "Gesig deteksie",
"face_detection_description": "Detecteer die gesigte in media deur middel van masjienleer. Vir videos word slegs die duimnaelskets oorweeg. “Herlaai” (ver)werk al die media weer. “Stel terug” verwyder boonop alle huidige gesigdata. “Onverwerk” plaas bates in die tou wat nog nie verwerk is nie. Gedekte gesigte sal ná voltooiing van Gesigdetectie vir Gesigherkenning in die tou geplaas word, om hulle in bestaande of nuwe persone te groepeer.",
"facial_recognition_job_description": "Groepeer gesigte in mense in. Die stap is vinniger nadat Gesig Deteksie klaar is. \"Herstel\" (her-)groepeer alle gesigte. \"Vermiste\" plaas gesigte in ry wat nie 'n persoon gekoppel het nie.",
"failed_job_command": "Opdrag {command} het misluk vir werk: {job}",
"force_delete_user_warning": "WAARSKUWING: Dit sal onmiddellik die gebruiker en alle bates verwyder. Dit kan nie ontdoen word nie en die lêers kan nie herstel word nie.",
"forcing_refresh_library_files": "Forseer herlaai van alle biblioteeklêers",
"image_format": "Formaat",
"image_format_description": "WebP produseer kleiner lêers as JPEG, maar is stadiger om te enkodeer.",
"image_fullsize_description": "Vol grote prent met geen metadata, gebruik wanner ingezoem",
"image_fullsize_enabled": "Skakel aan vol grote prent generasie",
"image_prefer_embedded_preview": "Verkies ingebedde voorskou",
"image_prefer_wide_gamut": "Verkies wide gamut",
"image_prefer_wide_gamut_setting_description": "Gebruik Display P3 vir kleinkiekies. Dit behou die lewendheid van beelde met wye kleurruimtes beter, maar beelde kan anders verskyn op ou apparate met 'n ou blaaierweergawe. sRGB-beelde gebruik steeds sRGB om kleurverskuiwings te voorkom.",
@@ -88,99 +80,8 @@
"job_concurrency": "{job} gelyktydigheid",
"job_created": "Taak gemaak",
"job_not_concurrency_safe": "Hierdie taak kan nie gelyktydig uitgevoer word nie.",
"job_settings": "Agtergrondtaakinstellings",
"job_settings_description": "Bestuur werkgelyktydigheid",
"job_status": "Werkstatus",
"library_created": "Biblioteek geskep: {library}",
"library_deleted": "Biblioteek verwyder",
"library_import_path_description": "Spesifiseer 'n leer om in te neem. Hierdie leer, en al die sub leers, gaan geskandeer for vir prente en videos.",
"library_scanning": "Periodieke Skandering",
"library_scanning_description": "Stel periodieke skandering van biblioteek in",
"library_scanning_enable_description": "Aktiveer periodieke biblioteekskandering",
"library_settings": "Eksterne Biblioteek",
"map_settings": "Kaart",
"migration_job": "Migrasie",
"oauth_settings": "OAuth",
"transcoding_acceleration_vaapi": "VAAPI"
"job_settings": "Agtergrondtaakinstellings"
},
"administration": "Administrasie",
"advanced": "Gevorderde",
"albums": "Albums",
"all": "Alle",
"anti_clockwise": "Anti-kloksgewys",
"archive": "Argief",
"asset_skipped": "Oorgeslaan",
"asset_uploaded": "Opgelaai",
"asset_uploading": "Oplaai…",
"assets": "Bates",
"back": "Terug",
"backward": "Agteruit",
"build": "Bou",
"camera": "Kamera",
"cancel": "Kanselleer",
"city": "Stad",
"clockwise": "Kloksgewys",
"close": "Maak toe",
"color": "Kleur",
"confirm": "Bevestig",
"contain": "Bevat",
"context": "Konteks",
"continue": "Gaan voort",
"country": "Land",
"cover": "Bedek",
"create": "Skep",
"created": "Geskep",
"dark": "Donker",
"day": "Dag",
"delete": "Verwyder",
"description": "Beskrywing",
"details": "Besonderhede",
"direction": "Rigting",
"discover": "Ontdek",
"documentation": "Dokumentasie",
"done": "Klaar",
"download": "Aflaai",
"download_settings": "Aflaai",
"duplicates": "Duplikate",
"duration": "Duur",
"edit": "Wysig",
"edited": "Gewysigd",
"search_by_description": "Soek by beskrywing",
"search_by_description_example": "Stapdag in Sapa",
"version": "Weergawe",
"version_announcement_closing": "Jou friend, Alex",
"version_history": "Weergawegeskiedenis",
"version_history_item": "{version} geinstaleerd op {date}",
"video": "Video",
"videos": "Video's",
"view": "Bekyk",
"view_album": "Bekyk Album",
"view_all": "Bekyk alle",
"view_all_users": "Bekyk alle gebruikers",
"view_in_timeline": "Bekyk in tydlyn",
"view_link": "Bekyk skakel",
"view_links": "Bekyk skakels",
"view_name": "Bekyk",
"view_next_asset": "Bekyk volgende bate",
"view_previous_asset": "Bekyk vorige bate",
"view_qr_code": "Bekyk QR-kode",
"view_stack": "Bekyk stapel",
"view_user": "Bekyk gebruiker",
"viewer_remove_from_stack": "Verwyder van stapel",
"viewer_stack_use_as_main_asset": "Gebruik as hoofbate",
"viewer_unstack": "Ontstapel",
"visibility_changed": "Sigbaarheid verander voor {count, plural, one {# person} other {# people}}",
"waiting": "Wag",
"warning": "Waaskuwing",
"week": "Week",
"welcome": "Welkom",
"welcome_to_immich": "Welkom by Immich",
"wifi_name": "Wi-Fi Naam",
"wrong_pin_code": "Verkeerde PIN-kode",
"year": "Jaar",
"years_ago": "{years, plural, one {# year} other {# years}} gelede",
"yes": "Ja",
"you_dont_have_any_shared_links": "Jy het geen gedeelde skakels",
"your_wifi_name": "Jou Wi-Fi naam",
"zoom_image": "Vergroot Prent"
"search_by_description_example": "Stapdag in Sapa"
}

File diff suppressed because it is too large

View File

@@ -34,15 +34,18 @@
"backup_database_enable_description": "Verilənlər bazasının ehtiyat nüsxələrini aktiv et",
"backup_settings": "Ehtiyat Nüsxə Parametrləri",
"backup_settings_description": "Verilənlər bazasının ehtiyat nüsxə parametrlərini idarə et",
"check_all": "Hamısını yoxla",
"config_set_by_file": "Konfiqurasiya hal-hazırda konfiqurasiya faylı ilə təyin olunub",
"confirm_delete_library": "{library} kitabxanasını silmək istədiyinizdən əminmisiniz?",
"confirm_email_below": "Təsdiqləmək üçün aşağıya {email} yazın",
"confirm_user_password_reset": "{user} adlı istifadəçinin şifrəsini sıfırlamaq istədiyinizdən əminmisiniz?",
"disable_login": "Giriş etməni söndür",
"duplicate_detection_job_description": "Bənzər şəkilləri tapmaq üçün maşın öyrənməsini işə salın. Bu prosses Smart Search funksiyasına əsaslanır",
"external_library_created_at": "Xarici kitabxana ({date} (tarixində yaradıldı)",
"external_library_management": "Xarici kitabxana idarəetməsi",
"face_detection": "Üz tanıma",
"force_delete_user_warning": "XƏBƏRDARLIQ: Bu əməliyyat istifadəçi və bütün məlumatları siləcəkdir. Bu prossesi və silinən faylları geri qaytarmaq olmaz.",
"forcing_refresh_library_files": "Bütün kitabxana fayllarını məcburi yeniləmə",
"image_format_description": "WebP, JPEG faylına görə daha kiçik həcmə sahibdir, lakin onu kodlaşdırmaq daha çox vaxt alır.",
"image_preview_title": "Önizləmə parametrləri",
"image_quality": "Keyfiyyət",

View File

@@ -22,7 +22,6 @@
"add_partner": "Дадаць партнёра",
"add_path": "Дадаць шлях",
"add_photos": "Дадаць фота",
"add_tag": "Дадаць тэг",
"add_to": "Дадаць у…",
"add_to_album": "Дадаць у альбом",
"add_to_album_bottom_sheet_added": "Дададзена да {album}",
@@ -34,39 +33,41 @@
"added_to_favorites_count": "Дададзена {count, number} да абранага",
"admin": {
"add_exclusion_pattern_description": "Дадайце шаблоны выключэнняў. Падтрымліваецца выкарыстанне сімвалаў * , ** і ?. Каб ігнараваць усе файлы ў любой дырэкторыі з назвай \"Raw\", выкарыстоўвайце \"**/Raw/**\". Каб ігнараваць усе файлы, якія заканчваюцца на \".tif\", выкарыстоўвайце \"**/.tif\". Каб ігнараваць абсолютны шлях, выкарыстоўвайце \"/path/to/ignore/**\".",
"admin_user": "Адміністратар",
"asset_offline_description": "Гэты знешні бібліятэчны актыў больш не знойдзены на дыску і быў перамешчаны ў сметніцу. Калі файл быў перамешчаны ў межах бібліятэкі, праверце вашу хроніку для новага адпаведнага актыва. Каб аднавіць гэты актыў, пераканайцеся, што шлях да файла ніжэй даступны для Immich і адскануйце бібліятэку.",
"authentication_settings": "Налады праверкі сапраўднасці",
"authentication_settings_description": "Кіраванне паролямі, OAuth, і іншыя налады праверкі сапраўднасці",
"authentication_settings_disable_all": "Вы ўпэўнены, што жадаеце адключыць усе спосабы логіну? Логін будзе цалкам адключаны.",
"authentication_settings_reenable": "Каб зноў уключыць, выкарыстайце <link>Каманду сервера</link>.",
"background_task_job": "Фонавыя заданні",
"backup_database": "Стварыць рэзервовую копію базы даных",
"backup_database": "Рэзервовая копія базы даных",
"backup_database_enable_description": "Уключыць рэзерваванне базы даных",
"backup_keep_last_amount": "Колькасць папярэдніх рэзервовых копій для захавання",
"backup_settings": "Налады рэзервовага капіявання",
"backup_settings_description": "Кіраванне наладамі рэзервавання базы даных.",
"backup_settings_description": "Кіраванне наладамі дампа базы дадзеных. Заўвага: гэтыя задачы не кантралююцца, і ў выпадку няўдачы паведамленне адпраўлена не будзе.",
"check_all": "Праверыць усе",
"cleanup": "Ачыстка",
"cleared_jobs": "Ачышчаны заданні для: {job}",
"config_set_by_file": "Канфігурацыя зараз усталявана праз файл канфігурацыі",
"confirm_delete_library": "Вы ўпэўнены што жадаеце выдаліць бібліятэку {library}?",
"config_set_by_file": "Канфігурацыя ў зараз усталявана праз файл канфігурацыі",
"confirm_delete_library": "Вы ўпэўнены што жадаеце выдаліць {library} бібліятэку?",
"confirm_delete_library_assets": "Вы ўпэўнены, што хочаце выдаліць гэтую бібліятэку? Гэта прывядзе да выдалення {count, plural, one {# актыву} other {усіх # актываў}}, якія змяшчаюцца ў Immich, і гэта дзеянне немагчыма будзе адмяніць. Файлы застануцца на дыску.",
"confirm_email_below": "Каб пацвердзіць, увядзіце \"{email}\" ніжэй",
"confirm_reprocess_all_faces": "Вы ўпэўнены, што хочаце пераапрацаваць усе твары? Гэта таксама прывядзе да выдалення імя людзей.",
"confirm_user_password_reset": "Вы ўпэўнены ў тым, што жадаеце скінуць пароль {user}?",
"confirm_user_pin_code_reset": "Вы ўпэўнены ў тым, што жадаеце скінуць PIN-код {user}?",
"create_job": "Стварыць заданне",
"cron_expression": "Выраз Cron",
"cron_expression_description": "Усталюйце інтэрвал сканавання, выкарыстоўваючы фармат cron. Для атрымання дадатковай інфармацыі, калі ласка, звярніцеся, напрыклад, да <link>Crontab Guru</link>",
"cron_expression_presets": "Прадустаноўкі выразаў Cron",
"cron_expression_presets": "Прадустановкі выразаў Cron",
"disable_login": "Адключыць уваход",
"duplicate_detection_job_description": "Запусціць машыннае навучанне на актывах для выяўлення падобных выяў. Залежыць ад Smart Search",
"exclusion_pattern_description": "Шаблоны выключэння дазваляюць ігнараваць файлы і папкі пры сканаванні вашай бібліятэкі. Гэта карысна, калі ў вас ёсць папкі, якія змяшчаюць файлы, якія вы не хочаце імпартаваць, напрыклад, файлы RAW.",
"external_library_created_at": "Знешняя бібліятэка (створана {date})",
"external_library_management": "Кіраванне знешняй бібліятэкай",
"face_detection": "Выяўленне твараў",
"face_detection_description": "Выяўляць твары на фотаздымках і відэа з дапамогай машыннага навучання. Для відэа ўлічваецца толькі мініяцюра. \"Абнавіць\" (пера)апрацоўвае ўсе медыя. \"Скінуць\" дадаткова ачышчае ўсе бягучыя дадзеныя пра твары. \"Адсутнічае\" ставіць у чаргу медыя, якія яшчэ не былі апрацаваныя. Выяўленыя твары будуць пастаўлены ў чаргу для распазнавання асоб пасля завяршэння выяўлення твараў, з групаваннем іх па існуючых або новых людзях.",
"facial_recognition_job_description": "Групаваць выяўленыя твары па асобах. Гэты этап выконваецца пасля завяршэння выяўлення твараў. \"Скінуць\" (паўторна) перагрупоўвае ўсе твары. \"Адсутнічае\" ставіць у чаргу твары, якія яшчэ не прыпісаныя да якой-небудзь асобы.",
"failed_job_command": "Каманда {command} не выканалася для задання: {job}",
"force_delete_user_warning": "ПАПЯРЭДЖАННЕ: Гэта дзеянне неадкладна выдаліць карыстальніка і ўсе аб'екты. Гэта дзеянне не можа быць адроблена і файлы немагчыма будзе аднавіць.",
"forcing_refresh_library_files": "Прымусовае абнаўленне ўсіх файлаў бібліятэкі",
"image_format": "Фармат",
"image_format_description": "WebP стварае меншыя файлы, чым JPEG, але павольней кадуе.",
"image_fullsize_description": "Выява ў поўным памеры без метаданых, выкарыстоўваецца пры павелічэнні",
@@ -74,272 +75,15 @@
"image_fullsize_enabled_description": "Ствараць выяву ў поўным памеры для фарматаў, што не прыдатныя для вэб. Калі ўключана опцыя \"Аддаваць перавагу ўбудаванай праяве\", прагляды выкарыстоўваюцца непасрэдна без канвертацыі. Не ўплывае на вэб-прыдатныя фарматы, такія як JPEG.",
"image_fullsize_quality_description": "Якасць выявы ў поўным памеры ад 1 да 100. Больш высокае значэнне лепшае, але прыводзіць да павелічэння памеру файла.",
"image_fullsize_title": "Налады выявы ў поўным памеры",
"image_prefer_embedded_preview_setting_description": "Выкарыстоўваць убудаваныя праявы ў RAW-фотаздымках ў якасці ўваходных дадзеных для апрацоўкі малюнкаў, калі магчыма. Гэта дазваляе атрымаць больш дакладныя колеры для некаторых відарысаў, але ж якасць праяў залежыць ад камеры, і на відарысе можа быць больш артэфактаў сціску.",
"image_prefer_wide_gamut": "Аддаць перавагу шырокай гаме",
"image_preview_title": "Налады папярэдняга прагляду",
"image_quality": "Якасць",
"image_resolution": "Раздзяляльнасць",
"image_settings": "Налады відарыса",
"image_settings_description": "Кіруйце якасцю і раздзяляльнасцю сгенерыраваных відарысаў",
"library_created": "Створана бібліятэка: {library}",
"library_deleted": "Бібліятэка выдалена",
"map_dark_style": "Цёмны стыль",
"map_enable_description": "Уключыць функцыі карты",
"map_gps_settings": "Налады карты і GPS",
"map_light_style": "Светлы стыль",
"map_settings": "Карта",
"map_settings_description": "Кіраванне наладамі карты",
"map_style_description": "URL-адрас style.json тэмы карты",
"metadata_settings": "Налады метаданых",
"oauth_button_text": "Тэкст кнопкі",
"oauth_settings": "OAuth",
"system_settings": "Сістэмныя налады",
"theme_settings": "Налады тэмы",
"transcoding_acceleration_vaapi": "VAAPI",
"transcoding_audio_codec": "Аудыякодэк",
"transcoding_video_codec": "Відэакодэк",
"trash_settings": "Налады сметніцы",
"trash_settings_description": "Кіраванне наладамі сметніцы",
"version_check_settings": "Праверка версіі",
"version_check_settings_description": "Уключыць/адключыць апавяшчэнні аб новай версіі"
"image_settings_description": "Кіруйце якасцю і раздзяляльнасцю сгенерыраваных відарысаў"
},
"advanced_settings_troubleshooting_title": "Выпраўленне непаладак",
"album_added": "Альбом дададзены",
"album_name": "Назва альбома",
"album_remove_user": "Выдаліць карыстальніка?",
"album_updated": "Альбом абноўлены",
"albums": "Альбомы",
"all": "Усе",
"all_albums": "Усе альбомы",
"all_people": "Усе людзі",
"all_videos": "Усе відэа",
"app_bar_signout_dialog_ok": "Так",
"app_bar_signout_dialog_title": "Выйсці",
"app_settings": "Налады праграмы",
"archive": "Архіў",
"archive_size": "Памер архіва",
"asset_uploading": "Запампоўванне…",
"back": "Назад",
"backup_all": "Усе",
"backup_controller_page_background_wifi": "Толькі праз Wi-Fi",
"buy": "Купіць Immich",
"cache_settings_clear_cache_button": "Ачысціць кэш",
"cache_settings_tile_title": "Лакальнае сховішча",
"cancel": "Скасаваць",
"cancel_search": "Скасаваць пошук",
"canceled": "Скасавана",
"city": "Горад",
"clear": "Ачысціць",
"clear_all": "Ачысціць усё",
"client_cert_dialog_msg_confirm": "ОК",
"client_cert_enter_password": "Увядзіце пароль",
"client_cert_import": "Імпарт",
"close": "Закрыць",
"collapse": "Згарнуць",
"collapse_all": "Згарнуць усё",
"color": "Колер",
"color_theme": "Колеравая тэма",
"continue": "Працягнуць",
"control_bottom_app_bar_create_new_album": "Стварыць новы альбом",
"control_bottom_app_bar_delete_from_immich": "Выдаліць з Immich",
"control_bottom_app_bar_delete_from_local": "Выдаліць з прылады",
"control_bottom_app_bar_edit_location": "Рэдагаваць месцазнаходжанне",
"country": "Краіна",
"cover": "Вокладка",
"covers": "Вокладкі",
"create": "Стварыць",
"create_album": "Стварыць альбом",
"create_album_page_untitled": "Без назвы",
"create_library": "Стварыць бібліятэку",
"create_link": "Стварыць спасылку",
"create_new_user": "Стварыць новага карыстальніка",
"create_tag": "Стварыць тэг",
"create_user": "Стварыць карыстальніка",
"dark": "Цёмная",
"day": "Дзень",
"delete": "Выдаліць",
"delete_album": "Выдаліць альбом",
"delete_dialog_ok_force": "Усё адно выдаліць",
"delete_dialog_title": "Выдаліць назаўжды",
"delete_face": "Выдаліць твар",
"delete_key": "Выдаліць ключ",
"delete_library": "Выдаліць бібліятэку",
"delete_link": "Выдаліць спасылку",
"delete_local_dialog_ok_force": "Усё адно выдаліць",
"delete_others": "Выдаліць іншыя",
"delete_tag": "Выдаліць тэг",
"delete_user": "Выдаліць карыстальніка",
"discord": "Discord",
"documentation": "Дакументацыя",
"done": "Гатова",
"download": "Спампаваць",
"download_canceled": "Спампоўванне скасавана",
"download_complete": "Спампоўванне завершана",
"download_enqueue": "Спампоўванне дададзена ў чаргу",
"downloading": "Спампоўванне",
"edit": "Рэдагаваць",
"edit_album": "Рэдагаваць альбом",
"edit_avatar": "Рэдагаваць аватар",
"edit_date": "Рэдагаваць дату",
"edit_date_and_time": "Рэдагаваь дату і час",
"edit_description": "Рэдагаваць апісанне",
"edit_description_prompt": "Выберыце новае апісанне:",
"edit_faces": "Рэдагаваць твары",
"edit_import_path": "Рэдагаваць шлях імпарту",
"edit_import_paths": "Рэдагаваць шляхі імпарту",
"edit_key": "Рэдагаваць ключ",
"edit_link": "Рэдагаваць спасылку",
"edit_location": "Рэдагаваць месцазнаходжанне",
"edit_location_dialog_title": "Месцазнаходжанне",
"edit_name": "Рэдагаваць назву",
"edit_people": "Рэдагаваць людзей",
"edit_tag": "Рэдагаваць тэг",
"edit_title": "Рэдагаваць загаловак",
"edit_user": "Рэдагаваць карыстальніка",
"edited": "Адрэдагавана",
"editor": "Рэдактар",
"editor_close_without_save_prompt": "Змены не будуць захаваны",
"editor_close_without_save_title": "Закрыць рэдактар?",
"editor_crop_tool_h2_aspect_ratios": "Суадносіны бакоў",
"editor_crop_tool_h2_rotation": "Паварот",
"error": "Памылка",
"error_saving_image": "Памылка: {error}",
"exif": "Exif",
"exif_bottom_sheet_description": "Дадаць апісанне...",
"favorite": "У абраным",
"favorite_or_unfavorite_photo": "Дадаць або выдаліць фота з абранага",
"favorites": "Абраныя",
"file_name": "Назва файла",
"filename": "Назва файла",
"filetype": "Тып файла",
"filter": "Фільтр",
"forward": "Наперад",
"gcast_enabled": "Google Cast",
"general": "Агульныя",
"go_back": "Назад",
"go_to_folder": "Перайсці да папкі",
"hi_user": "Вітаем, {name} ({email})",
"hide_all_people": "Схаваць усіх людзей",
"hide_gallery": "Схаваць галерэю",
"hide_named_person": "Схаваць {name}",
"hide_password": "Схаваць пароль",
"hide_person": "Схаваць чалавека",
"image_viewer_page_state_provider_download_started": "Спампоўванне пачалося",
"immich_logo": "Лагатып Immich",
"interval": {
"day_at_onepm": "Кожны дзень а 13-й гадзіне",
"hours": "{hours, plural, one {Кожную гадзіну} few {Кожныя {hours, number} гадзіны} many {Кожныя {hours, number} гадзін} other {Кожныя {hours, number} гадзін}}",
"night_at_midnight": "Кожную ноч апоўначы",
"night_at_twoam": "Кожную ноч а 2-й гадзіне"
},
"language": "Мова",
"library": "Бібліятэка",
"light": "Светлая",
"login_form_back_button_text": "Назад",
"login_form_email_hint": "youremail@email.com",
"login_form_endpoint_hint": "http://your-server-ip:port",
"login_form_password_hint": "пароль",
"login_form_save_login": "Заставацца ў сістэме",
"main_menu": "Галоўнае меню",
"map_location_dialog_yes": "Так",
"map_settings_dark_mode": "Цёмны рэжым",
"map_settings_date_range_option_day": "Апошнія 24 гадзіны",
"map_settings_date_range_option_days": "Апошніх дзён: {days}",
"map_settings_date_range_option_year": "Апошні год",
"map_settings_date_range_option_years": "Апошніх год: {years}",
"map_settings_dialog_title": "Налады карты",
"map_settings_theme_settings": "Тэма карты",
"menu": "Меню",
"minute": "Хвіліна",
"month": "Месяц",
"monthly_title_text_date_format": "MMMM y",
"my_albums": "Мае альбомы",
"name": "Імя",
"name_or_nickname": "Імя або псеўданім",
"next": "Далей",
"no": "Не",
"offline": "Па-за сеткай",
"ok": "ОК",
"online": "У сетцы",
"open": "Адкрыць",
"or": "або",
"partner_list_user_photos": "Фота карыстальніка {user}",
"pause": "Прыпыніць",
"people": "Людзі",
"permission_onboarding_back": "Назад",
"permission_onboarding_continue_anyway": "Усё адно працягнуць",
"photos": "Фота",
"photos_and_videos": "Фота і відэа",
"place": "Месца",
"places": "Месцы",
"port": "Порт",
"previous": "Папярэдняе",
"profile": "Профіль",
"profile_drawer_app_logs": "Журналы",
"profile_drawer_github": "GitHub",
"purchase_button_buy": "Купіць",
"purchase_button_buy_immich": "Купіць Immich",
"purchase_button_select": "Выбраць",
"remove": "Выдаліць",
"remove_from_album": "Выдаліць з альбома",
"remove_from_favorites": "Выдаліць з абраных",
"remove_tag": "Выдаліць тэг",
"remove_url": "Выдаліць URL-адрас",
"remove_user": "Выдаліць карыстальніка",
"rename": "Перайменаваць",
"repository": "Рэпазіторый",
"reset": "Скінуць",
"reset_password": "Скінуць пароль",
"restore": "Аднавіць",
"restore_all": "Аднавіць усё",
"restore_user": "Аднавіць карыстальніка",
"resume": "Узнавіць",
"role": "Роля",
"role_editor": "Рэдактар",
"role_viewer": "Глядач",
"save": "Захаваць",
"save_to_gallery": "Захаваць у галерэю",
"search_filter_date": "Дата",
"search_filter_location": "Месцазнаходжанне",
"search_filter_location_title": "Выберыце месцазнаходжанне",
"search_filter_media_type": "Тып медыя",
"search_filter_media_type_title": "Выберыце тып медыя",
"search_page_screenshots": "Здымкі экрана",
"search_page_selfies": "Сэлфі",
"search_page_things": "Рэчы",
"search_page_your_map": "Ваша карта",
"second": "Секунда",
"send_message": "Адправіць паведамленне",
"setting_languages_apply": "Ужыць",
"setting_notifications_notify_never": "ніколі",
"settings": "Налады",
"share_add_photos": "Дадаць фота",
"shared_album_section_people_title": "ЛЮДЗІ",
"shared_link_info_chip_metadata": "EXIF",
"sharing_page_empty_list": "ПУСТЫ СПІС",
"sign_out": "Выйсці",
"sign_up": "Зарэгістравацца",
"size": "Памер",
"sort_title": "Загаловак",
"source": "Крыніца",
"tag": "Тэг",
"tags": "Тэгі",
"theme": "Тэма",
"theme_selection": "Выбар тэмы",
"timeline": "Хроніка",
"total": "Усяго",
"trash": "Сметніца",
"trash_page_delete_all": "Выдаліць усе",
"trash_page_restore_all": "Аднавіць усе",
"trash_page_title": "Сметніца ({count})",
"type": "Тып",
"undo": "Адрабіць",
"upload": "Запампаваць",
"upload_status_errors": "Памылкі",
"uploading": "Запампоўванне",
"url": "URL-адрас",
"user": "Карыстальнік",
"user_has_been_deleted": "Гэты карыстальнік быў выдалены.",
"user_id": "ID карыстальніка",
"user_purchase_settings": "Купля",
"user_purchase_settings_description": "Кіруйце пакупкамі",
@@ -372,14 +116,14 @@
"view_next_asset": "Паказаць наступны аб'ект",
"view_previous_asset": "Праглядзець папярэдні аб'ект",
"view_stack": "Прагляд стэка",
"visibility_changed": "Бачнасць змянілася для {count, plural, one {# чалавека} other {# чалавек}}",
"visibility_changed": "Відзімасць змянілася для {count, plural, one {# чалавек(-аў)} астатніх {# чалавек}}",
"waiting": "Чакаюць",
"warning": "Папярэджанне",
"week": "Тыдзень",
"welcome": "Вітаем",
"welcome_to_immich": "Вітаем у Immich",
"year": "Год",
"years_ago": "{years, plural, one {# год} few {# гады} many {# гадоў} other {# гадоў}} таму",
"years_ago": "{years, plural, one {# год} other {# гадоў}} таму",
"yes": "Так",
"you_dont_have_any_shared_links": "У вас няма абагуленых спасылак",
"zoom_image": "Павялічыць відарыс"

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff
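The ICU MessageFormat strings in this locale file (for example "years_ago" and "visibility_changed") keep their plural selector keywords (one, few, many, other) in English; only the text inside each branch is translated. A minimal sketch of how such a string is formatted, assuming the messages are parsed with FormatJS's intl-messageformat (a common choice behind Svelte i18n loaders, not confirmed by this diff):

// Sketch only: illustrates Belarusian CLDR plural selection for a string
// taken from this file; the intl-messageformat dependency is an assumption.
import IntlMessageFormat from 'intl-messageformat';

const yearsAgo = new IntlMessageFormat(
  '{years, plural, one {# год} few {# гады} many {# гадоў} other {# гадоў}} таму',
  'be', // Belarusian plural rules: 1 -> one, 3 -> few, 5 -> many
);

console.log(yearsAgo.format({ years: 1 })); // "1 год таму"
console.log(yearsAgo.format({ years: 3 })); // "3 гады таму"
console.log(yearsAgo.format({ years: 5 })); // "5 гадоў таму"

// Translating a selector keyword (e.g. replacing "other" with a Belarusian
// word) makes the message fail to parse, so it cannot be formatted for any
// count — which is why only the branch text above is localized.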