Compare commits


1 Commit

Author     SHA1        Message                          Date
Alex Tran  7277ea3d7a  feat(server): asset_user table   2025-01-28 22:04:21 -06:00
2207 changed files with 102601 additions and 162874 deletions

2
.devcontainer/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
.env
library

16
.devcontainer/Dockerfile Normal file

@@ -0,0 +1,16 @@
ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:9791f4aa527774bc370c6bd2f6705ce5a686f1e6f204badd8dfaacce28c631ae
FROM ${BASEIMAGE}
# Flutter SDK
# https://flutter.dev/docs/development/tools/sdk/releases?tab=linux
ENV FLUTTER_CHANNEL="stable"
ENV FLUTTER_VERSION="3.24.5"
ENV FLUTTER_HOME=/flutter
ENV PATH=${PATH}:${FLUTTER_HOME}/bin
# Flutter SDK
RUN mkdir -p ${FLUTTER_HOME} \
&& curl -C - --output flutter.tar.xz https://storage.googleapis.com/flutter_infra_release/releases/${FLUTTER_CHANNEL}/linux/flutter_linux_${FLUTTER_VERSION}-${FLUTTER_CHANNEL}.tar.xz \
&& tar -xf flutter.tar.xz --strip-components=1 -C ${FLUTTER_HOME} \
&& rm flutter.tar.xz \
&& chown -R 1000:1000 ${FLUTTER_HOME}
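
A quick way to sanity-check the pinned toolchain after building this image; the local tag used here is hypothetical:

    # Build the devcontainer image and confirm the Flutter version it ships.
    docker build -t immich-devcontainer-test .devcontainer
    docker run --rm immich-devcontainer-test flutter --version
    # The first line of output should report Flutter 3.24.5 (channel stable).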


@@ -1,67 +1,26 @@
 {
-  "name": "Immich - Backend, Frontend and ML",
-  "service": "immich-server",
-  "runServices": [
-    "immich-server",
-    "redis",
-    "database",
-    "immich-machine-learning"
-  ],
+  "name": "Immich",
+  "service": "immich-devcontainer",
   "dockerComposeFile": [
-    "../docker/docker-compose.dev.yml",
-    "./server/container-compose-overrides.yml"
+    "docker-compose.yml",
+    "../docker/docker-compose.dev.yml"
   ],
   "customizations": {
     "vscode": {
       "extensions": [
-        "Dart-Code.dart-code",
-        "Dart-Code.flutter",
         "dbaeumer.vscode-eslint",
-        "dcmdev.dcm-vscode-extension",
         "esbenp.prettier-vscode",
-        "svelte.svelte-vscode",
-        "ms-vscode-remote.remote-containers",
-        "foxundermoon.shell-format",
-        "timonwong.shellcheck",
-        "rvest.vs-code-prettier-eslint",
-        "bluebrown.yamlfmt",
-        "vkrishna04.cspell-sync",
-        "vitest.explorer",
-        "ms-playwright.playwright",
-        "ms-azuretools.vscode-docker"
+        "svelte.svelte-vscode"
       ]
     }
   },
-  "forwardPorts": [3000, 9231, 9230, 2283],
-  "portsAttributes": {
-    "3000": {
-      "label": "Immich - Frontend HTTP",
-      "description": "The frontend of the Immich project",
-      "onAutoForward": "openBrowserOnce"
-    },
-    "2283": {
-      "label": "Immich - API Server - HTTP",
-      "description": "The API server of the Immich project"
-    },
-    "9231": {
-      "label": "Immich - API Server - DEBUG",
-      "description": "The API server of the Immich project"
-    },
-    "9230": {
-      "label": "Immich - Workers - DEBUG",
-      "description": "The workers of the Immich project"
-    }
-  },
+  "forwardPorts": [],
+  "initializeCommand": "bash .devcontainer/scripts/initializeCommand.sh",
+  "onCreateCommand": "bash .devcontainer/scripts/onCreateCommand.sh",
   "overrideCommand": true,
-  "workspaceFolder": "/workspaces/immich",
-  "remoteUser": "node",
-  "userEnvProbe": "loginInteractiveShell",
-  "remoteEnv": {
-    // The location where your uploaded files are stored
-    "UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./library}",
-    // Connection secret for postgres. You should change it to a random password
-    // Please use only the characters `A-Za-z0-9`, without special characters or spaces
-    "DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
-    // The database username
-    "DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
-    // The database name
-    "DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
-  }
+  "workspaceFolder": "/immich",
+  "remoteUser": "node"
 }
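
The remoteEnv block on the removed side relies on the devcontainer ${localEnv:VAR:default} substitution: each value is read from the host environment and falls back to the default after the second colon when the variable is unset. A sketch of the host-side setup it expected, with illustrative values only:

    # Export these before opening the devcontainer; all are optional thanks to the defaults.
    export UPLOAD_LOCATION=/mnt/photos    # falls back to ./library when unset
    export DB_PASSWORD=postgres           # A-Za-z0-9 only, per the inline comment
    export DB_USERNAME=postgres
    export DB_DATABASE_NAME=immich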


@@ -0,0 +1,8 @@
services:
immich-devcontainer:
build:
dockerfile: Dockerfile
extra_hosts:
- 'host.docker.internal:host-gateway'
volumes:
- ..:/immich:cached


@@ -1,34 +0,0 @@
services:
immich-server:
build:
target: dev-container-mobile
environment:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override # bind mount host to /workspaces/immich
- ..:/workspaces/immich
- cli_node_modules:/workspaces/immich/cli/node_modules
- e2e_node_modules:/workspaces/immich/e2e/node_modules
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION}/photos:/workspaces/immich/server/upload
- ${UPLOAD_LOCATION}/photos/upload:/workspaces/immich/server/upload/upload
- /etc/localtime:/etc/localtime:ro
database:
volumes:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
volumes:
# Node modules for each service to avoid conflicts and ensure consistent dependencies
cli_node_modules:
e2e_node_modules:
open_api_node_modules:
server_node_modules:
web_node_modules:
# UPLOAD_LOCATION must be set to an absolute path or vol-upload
vol-upload:
# DB_DATA_LOCATION must be set to an absolute path or vol-database
vol-database:


@@ -1,52 +0,0 @@
{
"name": "Immich - Mobile",
"service": "immich-server",
"runServices": [
"immich-server",
"redis",
"database",
"immich-machine-learning"
],
"dockerComposeFile": [
"../../docker/docker-compose.dev.yml",
"./container-compose-overrides.yml"
],
"customizations": {
"vscode": {
"extensions": [
"Dart-Code.dart-code",
"Dart-Code.flutter",
"dcmdev.dcm-vscode-extension",
"esbenp.prettier-vscode",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"ms-vscode-remote.remote-containers",
"foxundermoon.shell-format",
"timonwong.shellcheck",
"rvest.vs-code-prettier-eslint",
"bluebrown.yamlfmt",
"vkrishna04.cspell-sync",
"vitest.explorer",
"ms-playwright.playwright",
"ms-azuretools.vscode-docker"
]
}
},
"forwardPorts": [],
"overrideCommand": true,
"workspaceFolder": "/workspaces/immich",
"remoteUser": "node",
"userEnvProbe": "loginInteractiveShell",
"remoteEnv": {
// The location where your uploaded files are stored
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./Library}",
// Connection secret for postgres. You should change it to a random password
// Please use only the characters `A-Za-z0-9`, without special characters or spaces
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
// The database username
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
// The database name
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
}
}


@@ -0,0 +1,6 @@
#!/bin/bash
# If .env file does not exist, create it by copying example.env from the docker folder
if [ ! -f ".devcontainer/.env" ]; then
cp docker/example.env .devcontainer/.env
fi


@@ -0,0 +1,25 @@
#!/bin/bash
# Enable multiarch for arm64 if necessary
if [ "$(dpkg --print-architecture)" = "arm64" ]; then
sudo dpkg --add-architecture amd64 && \
sudo apt-get update && \
sudo apt-get install -y --no-install-recommends \
qemu-user-static \
libc6:amd64 \
libstdc++6:amd64 \
libgcc1:amd64
fi
# Install DCM
wget -qO- https://dcm.dev/pgp-key.public | sudo gpg --dearmor -o /usr/share/keyrings/dcm.gpg
sudo echo 'deb [signed-by=/usr/share/keyrings/dcm.gpg arch=amd64] https://dcm.dev/debian stable main' | sudo tee /etc/apt/sources.list.d/dart_stable.list
sudo apt-get update
sudo apt-get install dcm
dart --disable-analytics
# Install immich
cd /immich || exit
make install-all


@@ -1,82 +0,0 @@
#!/bin/bash
export IMMICH_PORT="${DEV_SERVER_PORT:-2283}"
export DEV_PORT="${DEV_PORT:-3000}"
# search for immich directory inside workspace.
# /workspaces/immich is the bind mount, but other directories can be mounted if running
# Devcontainer: Clone [repository|pull request] in container volume
WORKSPACES_DIR="/workspaces"
IMMICH_DIR="$WORKSPACES_DIR/immich"
IMMICH_DEVCONTAINER_LOG="$HOME/immich-devcontainer.log"
log() {
# Display command on console, log with timestamp to file
echo "$*"
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" >>"$IMMICH_DEVCONTAINER_LOG"
}
run_cmd() {
# Ensure log directory exists
mkdir -p "$(dirname "$IMMICH_DEVCONTAINER_LOG")"
log "$@"
# Execute command: display normally on console, log with timestamps to file
"$@" 2>&1 | tee >(while IFS= read -r line; do
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $line" >>"$IMMICH_DEVCONTAINER_LOG"
done)
# Preserve exit status
return "${PIPESTATUS[0]}"
}
# Find directories excluding /workspaces/immich
mapfile -t other_dirs < <(find "$WORKSPACES_DIR" -mindepth 1 -maxdepth 1 -type d ! -path "$IMMICH_DIR" ! -name ".*")
if [ ${#other_dirs[@]} -gt 1 ]; then
log "Error: More than one directory found in $WORKSPACES_DIR other than $IMMICH_DIR."
exit 1
elif [ ${#other_dirs[@]} -eq 1 ]; then
export IMMICH_WORKSPACE="${other_dirs[0]}"
else
export IMMICH_WORKSPACE="$IMMICH_DIR"
fi
log "Found immich workspace in $IMMICH_WORKSPACE"
log ""
fix_permissions() {
log "Fixing permissions for ${IMMICH_WORKSPACE}"
run_cmd sudo find "${IMMICH_WORKSPACE}/server/upload" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres/*" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres" -exec chown node {} +
# Change ownership for directories that exist
for dir in "${IMMICH_WORKSPACE}/.vscode" \
"${IMMICH_WORKSPACE}/cli/node_modules" \
"${IMMICH_WORKSPACE}/e2e/node_modules" \
"${IMMICH_WORKSPACE}/open-api/typescript-sdk/node_modules" \
"${IMMICH_WORKSPACE}/server/node_modules" \
"${IMMICH_WORKSPACE}/server/dist" \
"${IMMICH_WORKSPACE}/web/node_modules" \
"${IMMICH_WORKSPACE}/web/dist"; do
if [ -d "$dir" ]; then
run_cmd sudo chown node -R "$dir"
fi
done
log ""
}
install_dependencies() {
log "Installing dependencies"
(
cd "${IMMICH_WORKSPACE}" || exit 1
run_cmd make install-server
run_cmd make install-sdk
run_cmd make build-sdk
run_cmd make install-web
)
log ""
}
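
The PIPESTATUS indexing in run_cmd is what keeps the logging transparent: piping through tee would otherwise make the pipeline report tee's exit status rather than the command's. A minimal demonstration of the difference:

    # tee exits 0, so $? hides the failure of `false`...
    false | tee /dev/null
    echo "$?"                  # prints 0
    # ...while PIPESTATUS[0] preserves the first command's status.
    false | tee /dev/null
    echo "${PIPESTATUS[0]}"    # prints 1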


@@ -1,48 +0,0 @@
services:
immich-server:
build:
target: dev-container-server
env_file: !reset []
environment:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override
- ..:/workspaces/immich
- cli_node_modules:/workspaces/immich/cli/node_modules
- e2e_node_modules:/workspaces/immich/e2e/node_modules
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/workspaces/immich/server/upload
- ${UPLOAD_LOCATION:-upload2-devcontainer-volume}${UPLOAD_LOCATION:+/photos/upload}:/workspaces/immich/server/upload/upload
- /etc/localtime:/etc/localtime:ro
immich-web:
env_file: !reset []
immich-machine-learning:
env_file: !reset []
database:
env_file: !reset []
environment: !override
POSTGRES_PASSWORD: ${DB_PASSWORD-postgres}
POSTGRES_USER: ${DB_USERNAME-postgres}
POSTGRES_DB: ${DB_DATABASE_NAME-immich}
POSTGRES_INITDB_ARGS: '--data-checksums'
POSTGRES_HOST_AUTH_METHOD: md5
volumes:
- ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data
redis:
env_file: !reset []
volumes:
# Node modules for each service to avoid conflicts and ensure consistent dependencies
cli_node_modules:
e2e_node_modules:
open_api_node_modules:
server_node_modules:
web_node_modules:
upload1-devcontainer-volume:
upload2-devcontainer-volume:
postgres-devcontainer-volume:
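
The UPLOAD_LOCATION mounts above combine two POSIX-style expansions that Compose also honors: ${VAR:-default} substitutes a named volume when VAR is unset, and ${VAR:+suffix} appends a path component only when VAR is set. The same expressions evaluated in bash, as an illustration:

    UPLOAD_LOCATION=/mnt/photos
    echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
    # -> /mnt/photos/photos (bind mount from the host path)
    unset UPLOAD_LOCATION
    echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
    # -> upload1-devcontainer-volume (falls back to the named volume)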


@@ -1,17 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Starting Nest API Server"
log ""
cd "${IMMICH_WORKSPACE}/server" || (
log "Immich workspace not found"
exit 1
)
while true; do
run_cmd node ./node_modules/.bin/nest start --debug "0.0.0.0:9230" --watch
log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
sleep 3
done


@@ -1,22 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Starting Immich Web Frontend"
log ""
cd "${IMMICH_WORKSPACE}/web" || (
log "Immich Workspace not found"
exit 1
)
until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_PORT}/api/server/config"; do
log "Waiting for api server..."
sleep 1
done
while true; do
run_cmd node ./node_modules/.bin/vite dev --host 0.0.0.0 --port "${DEV_PORT}"
log "Web crashed with exit code $?. Respawning in 3s ..."
sleep 3
done


@@ -1,20 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Setting up Immich dev container..."
fix_permissions
log "Installing npm dependencies (node_modules)..."
install_dependencies
log "Setup complete, please wait while backend and frontend services automatically start"
log
log "If necessary, the services may be manually started using"
log
log "$ /immich-devcontainer/container-start-backend.sh"
log "$ /immich-devcontainer/container-start-frontend.sh"
log
log "From different terminal windows, as these scripts automatically restart the server"
log "on error, and will continuously run in a loop"

6
.gitattributes vendored

@@ -6,12 +6,6 @@ mobile/openapi/**/*.dart linguist-generated=true
 mobile/lib/**/*.g.dart -diff -merge
 mobile/lib/**/*.g.dart linguist-generated=true
-mobile/lib/**/*.drift.dart -diff -merge
-mobile/lib/**/*.drift.dart linguist-generated=true
-mobile/drift_schemas/main/drift_schema_*.json -diff -merge
-mobile/drift_schemas/main/drift_schema_*.json linguist-generated=true
 open-api/typescript-sdk/fetch-client.ts -diff -merge
 open-api/typescript-sdk/fetch-client.ts linguist-generated=true

1
.github/.nvmrc vendored

@@ -1 +0,0 @@
22.16.0


@@ -1,5 +1,5 @@
-title: '[Feature] feature-name-goes-here'
+title: "[Feature] feature-name-goes-here"
-labels: ['feature']
+labels: ["feature"]
 body:
   - type: markdown
@@ -11,9 +11,10 @@ body:
   - type: checkboxes
     attributes:
-      label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
+      label: I have searched the existing feature requests to make sure this is not a duplicate request.
       options:
-        - label: 'Yes'
+        - label: "Yes"
+          required: true
   - type: textarea
     id: feature

2
.github/FUNDING.yml vendored

@@ -1 +1 @@
-custom: ['https://buy.immich.app', 'https://immich.store']
+custom: ['https://buy.immich.app']


@@ -1,12 +1,6 @@
 name: Report an issue with Immich
 description: Report an issue with Immich
 body:
-  - type: checkboxes
-    attributes:
-      label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
-      options:
-        - label: 'Yes'
   - type: markdown
     attributes:
       value: |
@@ -83,7 +77,7 @@ body:
     id: repro
     attributes:
       label: Reproduction steps
-      description: 'How do you trigger this bug? Please walk us through it step by step.'
+      description: "How do you trigger this bug? Please walk us through it step by step."
       value: |
         1.
         2.
@@ -96,13 +90,12 @@ body:
     id: logs
     attributes:
       label: Relevant log output
-      description:
-        Please copy and paste any relevant logs below. (code formatting is
-        enabled, no need for backticks)
+      description: Please copy and paste any relevant logs below. (code formatting is
+        enabled, no need for backticks)
       render: shell
     validations:
       required: false
   - type: textarea
     attributes:
       label: Additional information


@@ -1 +1,2 @@
+blank_issues_enabled: false
 blank_pull_request_template_enabled: false


@@ -0,0 +1,22 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->
Fixes # (issue)
## How Has This Been Tested?
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->
- [ ] Test A
- [ ] Test B
## Screenshots (if appropriate):
## Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable

28
.github/package-lock.json generated vendored

@@ -1,28 +0,0 @@
{
"name": ".github",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"devDependencies": {
"prettier": "^3.5.3"
}
},
"node_modules/prettier": {
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz",
"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==",
"dev": true,
"license": "MIT",
"bin": {
"prettier": "bin/prettier.cjs"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/prettier/prettier?sponsor=1"
}
}
}
}


@@ -1,9 +0,0 @@
{
"scripts": {
"format": "prettier --check .",
"format:fix": "prettier --write ."
},
"devDependencies": {
"prettier": "^3.5.3"
}
}


@@ -1,36 +0,0 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->
Fixes # (issue)
## How Has This Been Tested?
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->
- [ ] Test A
- [ ] Test B
<details><summary><h2>Screenshots (if appropriate)</h2></summary>
<!-- Images go below this line. -->
</details>
<!-- API endpoint changes (if relevant)
## API Changes
The `/api/something` endpoint is now `/api/something-else`
-->
## Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable
- [ ] I have no unrelated changes in the PR.
- [ ] I have confirmed that any new dependencies are strictly necessary.
- [ ] I have written tests for new code (if applicable)
- [ ] I have followed naming conventions/patterns in the surrounding code
- [ ] All code in `src/services/` uses repositories implementations for database calls, filesystem operations, etc.
- [ ] All code in `src/repositories/` is pretty basic/simple and does not have any immich specific logic (that belongs in `src/services/`)

66
.github/release.yml vendored

@@ -1,33 +1,33 @@
changelog:
  categories:
    - title: 🚨 Breaking Changes
      labels:
        - changelog:breaking-change
    - title: 🫥 Deprecated Changes
      labels:
        - changelog:deprecated
    - title: 🔒 Security
      labels:
        - changelog:security
    - title: 🚀 Features
      labels:
        - changelog:feature
    - title: 🌟 Enhancements
      labels:
        - changelog:enhancement
    - title: 🐛 Bug fixes
      labels:
        - changelog:bugfix
    - title: 📚 Documentation
      labels:
        - changelog:documentation
    - title: 🌐 Translations
      labels:
        - changelog:translation


@@ -7,15 +7,6 @@ on:
       ref:
         required: false
         type: string
-    secrets:
-      KEY_JKS:
-        required: true
-      ALIAS:
-        required: true
-      ANDROID_KEY_PASSWORD:
-        required: true
-      ANDROID_STORE_PASSWORD:
-        required: true
   pull_request:
   push:
     branches: [main]
@@ -24,56 +15,52 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
      - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@v3
        with:
          filters: |
            mobile:
              - 'mobile/**'
-            workflow:
-              - '.github/workflows/build-mobile.yml'
      - name: Check if we should force jobs to run
        id: should_force
-        run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
+        run: echo "should_force=${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
  build-sign-android:
    name: Build and sign Android
    needs: pre-job
-    permissions:
-      contents: read
    # Skip when PR from a fork
    if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
    runs-on: macos-14
    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          ref: ${{ inputs.ref || github.sha }}
-          persist-credentials: false
+      - name: Determine ref
+        id: get-ref
+        run: |
+          input_ref="${{ inputs.ref }}"
+          github_ref="${{ github.sha }}"
+          ref="${input_ref:-$github_ref}"
+          echo "ref=$ref" >> $GITHUB_OUTPUT
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ steps.get-ref.outputs.ref }}
-      - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
+      - uses: actions/setup-java@v4
        with:
          distribution: 'zulu'
          java-version: '17'
          cache: 'gradle'
      - name: Setup Flutter SDK
-        uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
+        uses: subosito/flutter-action@v2
        with:
          channel: 'stable'
          flutter-version-file: ./mobile/pubspec.yaml
@@ -89,14 +76,6 @@ jobs:
        working-directory: ./mobile
        run: flutter pub get
-      - name: Generate translation file
-        run: make translation
-        working-directory: ./mobile
-      - name: Generate platform APIs
-        run: make pigeon
-        working-directory: ./mobile
      - name: Build Android App Bundle
        working-directory: ./mobile
        env:
@@ -108,7 +87,7 @@ jobs:
          flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
      - name: Publish Android Artifact
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
        with:
          name: release-apk-signed
          path: mobile/build/app/outputs/flutter-apk/*.apk
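
The "Determine ref" step on the right-hand side is plain bash default-expansion; roughly equivalent standalone, with illustrative values:

    input_ref=""           # inputs.ref, empty when the caller passes nothing
    github_ref="7277ea3"   # github.sha
    ref="${input_ref:-$github_ref}"
    echo "$ref"            # prints 7277ea3: falls back to the commit SHA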


@@ -8,38 +8,31 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   cleanup:
     name: Cleanup
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      actions: write
     steps:
       - name: Check out code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
      - name: Cleanup
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          REF: ${{ github.ref }}
        run: |
          gh extension install actions/gh-actions-cache
          REPO=${{ github.repository }}
+          BRANCH=${{ github.ref }}
          echo "Fetching list of cache keys"
-          cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
+          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
          ## Setting this to not fail the workflow while deleting cache keys.
          set +e
          echo "Deleting caches..."
          for cacheKey in $cacheKeysForPR
          do
-              gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
+              gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
          done
          echo "Done"
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -6,6 +6,7 @@ on:
       - 'cli/**'
       - '.github/workflows/cli.yml'
   pull_request:
+    branches: [main]
     paths:
       - 'cli/**'
       - '.github/workflows/cli.yml'
@@ -16,25 +17,21 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
+permissions:
+  packages: write
 jobs:
   publish:
     name: CLI Publish
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./cli
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+      - uses: actions/checkout@v4
      # Setup .npmrc file to publish to npm
-      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+      - uses: actions/setup-node@v4
        with:
          node-version-file: './cli/.nvmrc'
          registry-url: 'https://registry.npmjs.org'
@@ -52,25 +49,20 @@ jobs:
  docker:
    name: Docker
    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
    needs: publish
    steps:
      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        uses: docker/setup-qemu-action@v3.3.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@v3.8.0
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
        if: ${{ !github.event.pull_request.head.repo.fork }}
        with:
          registry: ghcr.io
@@ -85,7 +77,7 @@ jobs:
      - name: Generate docker image tags
        id: metadata
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        uses: docker/metadata-action@v5
        with:
          flavor: |
            latest=false
@@ -96,7 +88,7 @@ jobs:
            type=raw,value=latest,enable=${{ github.event_name == 'release' }}
      - name: Build and push image
-        uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
+        uses: docker/build-push-action@v6.12.0
        with:
          file: cli/Dockerfile
          platforms: linux/amd64,linux/arm64


@@ -9,14 +9,14 @@
 # the `language` matrix defined below to confirm you have the correct set of
 # supported CodeQL languages.
 #
-name: 'CodeQL'
+name: "CodeQL"
 on:
   push:
-    branches: ['main']
+    branches: [ "main" ]
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: ['main']
+    branches: [ "main" ]
   schedule:
     - cron: '20 13 * * 1'
@@ -24,8 +24,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   analyze:
     name: Analyze
@@ -38,44 +36,43 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        language: ['javascript', 'python']
+        language: [ 'javascript', 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
    steps:
      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality
      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
-        uses: github/codeql-action/autobuild@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        uses: github/codeql-action/autobuild@v3
      # Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
      # If the Autobuild fails above, remove it and uncomment the following three lines.
      # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
      # - run: |
      #   echo "Run, Build Application using script"
      #   ./location_of_script_within_repo/buildscript.sh
      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        uses: github/codeql-action/analyze@v3
        with:
-          category: '/language:${{matrix.language}}'
+          category: "/language:${{matrix.language}}"

73
.github/workflows/docker-cleanup.yml vendored Normal file

@@ -0,0 +1,73 @@
# This workflow runs on certain conditions to check for and potentially
# delete container images from the GHCR which no longer have an associated
# code branch.
# Requires a PAT with the correct scope set in the secrets.
#
# This workflow will not trigger runs on forked repos.
name: Docker Cleanup
on:
pull_request:
types:
- "closed"
push:
paths:
- ".github/workflows/docker-cleanup.yml"
concurrency:
group: registry-tags-cleanup
cancel-in-progress: false
jobs:
cleanup-images:
name: Cleanup Stale Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.9.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
is_org: "true"
do_delete: "true"
package_name: "${{ matrix.primary-name }}"
scheme: "pull_request"
repo_name: "immich"
match_regex: '^pr-(\d+)$|^(\d+)$'
cleanup-untagged-images:
name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-24.04
needs:
- cleanup-images
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
- primary-name: "immich-build-cache"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.9.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
do_delete: "true"
is_org: "true"
package_name: "${{ matrix.primary-name }}"
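
The match_regex above is PCRE-style; to eyeball which tags it would mark for deletion, an approximate re-run with grep -E (translating \d to [0-9]), using made-up tag names:

    for tag in pr-1234 5678 main v1.2.3; do
      echo "$tag" | grep -qE '^pr-([0-9]+)$|^([0-9]+)$' \
        && echo "$tag: would be deleted" \
        || echo "$tag: kept"
    done
    # pr-1234 and 5678 match; main and v1.2.3 survive.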


@@ -5,6 +5,7 @@ on:
   push:
     branches: [main]
   pull_request:
+    branches: [main]
   release:
     types: [published]
@@ -12,23 +13,20 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
+permissions:
+  packages: write
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
      - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@v3
        with:
          filters: |
            server:
@@ -38,156 +36,275 @@ jobs:
            - 'i18n/**'
            machine-learning:
              - 'machine-learning/**'
-            workflow:
-              - '.github/workflows/docker.yml'
-              - '.github/workflows/multi-runner-build.yml'
-              - '.github/actions/image-build'
      - name: Check if we should force jobs to run
        id: should_force
-        run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
+        run: echo "should_force=${{ github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
  retag_ml:
    name: Re-Tag ML
    needs: pre-job
-    permissions:
-      contents: read
-      packages: write
    if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
-        suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
+        suffix: ["", "-cuda", "-openvino", "-armnn"]
    steps:
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Re-tag image
-        env:
-          REGISTRY_NAME: 'ghcr.io'
-          REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
-          TAG_OLD: main${{ matrix.suffix }}
-          TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-          TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
        run: |
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+          REGISTRY_NAME="ghcr.io"
+          REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
+          TAG_OLD=main${{ matrix.suffix }}
+          TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
  retag_server:
    name: Re-Tag Server
    needs: pre-job
-    permissions:
-      contents: read
-      packages: write
    if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
-        suffix: ['']
+        suffix: [""]
    steps:
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Re-tag image
-        env:
-          REGISTRY_NAME: 'ghcr.io'
-          REPOSITORY: ${{ github.repository_owner }}/immich-server
-          TAG_OLD: main${{ matrix.suffix }}
-          TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-          TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
        run: |
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+          REGISTRY_NAME="ghcr.io"
+          REPOSITORY=${{ github.repository_owner }}/immich-server
+          TAG_OLD=main${{ matrix.suffix }}
+          TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
-  machine-learning:
+  build_and_push_ml:
    name: Build and Push ML
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
+    runs-on: ubuntu-latest
+    env:
+      image: immich-machine-learning
+      context: machine-learning
+      file: machine-learning/Dockerfile
    strategy:
+      # Prevent a failure in one image from stopping the other builds
      fail-fast: false
      matrix:
        include:
-          - device: cpu
-            tag-suffix: ''
-          - device: cuda
-            tag-suffix: '-cuda'
-            platforms: linux/amd64
-          - device: openvino
-            tag-suffix: '-openvino'
-            platforms: linux/amd64
-          - device: armnn
-            tag-suffix: '-armnn'
-            platforms: linux/arm64
-          - device: rknn
-            tag-suffix: '-rknn'
-            platforms: linux/arm64
-          - device: rocm
-            tag-suffix: '-rocm'
-            platforms: linux/amd64
-            runner-mapping: '{"linux/amd64": "mich"}'
-    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
-    permissions:
-      contents: read
-      actions: read
-      packages: write
-    secrets:
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-    with:
-      image: immich-machine-learning
-      context: machine-learning
-      dockerfile: machine-learning/Dockerfile
-      platforms: ${{ matrix.platforms }}
-      runner-mapping: ${{ matrix.runner-mapping }}
-      tag-suffix: ${{ matrix.tag-suffix }}
-      dockerhub-push: ${{ github.event_name == 'release' }}
-      build-args: |
-        DEVICE=${{ matrix.device }}
+          - platforms: linux/amd64,linux/arm64
+            device: cpu
+          - platforms: linux/amd64
+            device: cuda
+            suffix: -cuda
+          - platforms: linux/amd64
+            device: openvino
+            suffix: -openvino
+          - platforms: linux/arm64
+            device: armnn
+            suffix: -armnn
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3.3.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3.8.0
+      - name: Login to Docker Hub
+        # Only push to Docker Hub when making a release
+        if: ${{ github.event_name == 'release' }}
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        # Skip when PR from a fork
+        if: ${{ !github.event.pull_request.head.repo.fork }}
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Generate docker image tags
+        id: metadata
+        uses: docker/metadata-action@v5
+        with:
+          flavor: |
+            # Disable latest tag
+            latest=false
+          images: |
+            name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
+            name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
+          tags: |
+            # Tag with branch name
+            type=ref,event=branch,suffix=${{ matrix.suffix }}
+            # Tag with pr-number
+            type=ref,event=pr,suffix=${{ matrix.suffix }}
+            # Tag with git tag on release
+            type=ref,event=tag,suffix=${{ matrix.suffix }}
+            type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
+      - name: Determine build cache output
+        id: cache-target
+        run: |
+          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+            # Essentially just ignore the cache output (PR can't write to registry cache)
+            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
+          else
+            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
+          fi
+      - name: Build and push image
+        uses: docker/build-push-action@v6.12.0
+        with:
+          context: ${{ env.context }}
+          file: ${{ env.file }}
+          platforms: ${{ matrix.platforms }}
+          # Skip pushing when PR from a fork
+          push: ${{ !github.event.pull_request.head.repo.fork }}
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
+          cache-to: ${{ steps.cache-target.outputs.cache-to }}
+          tags: ${{ steps.metadata.outputs.tags }}
+          labels: ${{ steps.metadata.outputs.labels }}
+          build-args: |
+            DEVICE=${{ matrix.device }}
+            BUILD_ID=${{ github.run_id }}
+            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
+            BUILD_SOURCE_REF=${{ github.ref_name }}
+            BUILD_SOURCE_COMMIT=${{ github.sha }}
-  server:
+  build_and_push_server:
    name: Build and Push Server
+    runs-on: ubuntu-latest
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
-    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
-    permissions:
-      contents: read
-      actions: read
-      packages: write
-    secrets:
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-    with:
-      image: immich-server
-      context: .
-      dockerfile: server/Dockerfile
-      dockerhub-push: ${{ github.event_name == 'release' }}
-      build-args: |
-        DEVICE=cpu
+    env:
+      image: immich-server
+      context: .
+      file: server/Dockerfile
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - platforms: linux/amd64,linux/arm64
+            device: cpu
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3.3.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3.8.0
+      - name: Login to Docker Hub
+        # Only push to Docker Hub when making a release
+        if: ${{ github.event_name == 'release' }}
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        # Skip when PR from a fork
+        if: ${{ !github.event.pull_request.head.repo.fork }}
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Generate docker image tags
+        id: metadata
+        uses: docker/metadata-action@v5
+        with:
+          flavor: |
+            # Disable latest tag
+            latest=false
+          images: |
+            name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
+            name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
+          tags: |
+            # Tag with branch name
+            type=ref,event=branch,suffix=${{ matrix.suffix }}
+            # Tag with pr-number
+            type=ref,event=pr,suffix=${{ matrix.suffix }}
+            # Tag with git tag on release
+            type=ref,event=tag,suffix=${{ matrix.suffix }}
+            type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
+      - name: Determine build cache output
+        id: cache-target
+        run: |
+          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+            # Essentially just ignore the cache output (PR can't write to registry cache)
+            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
+          else
+            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
+          fi
+      - name: Build and push image
+        uses: docker/build-push-action@v6.12.0
+        with:
+          context: ${{ env.context }}
+          file: ${{ env.file }}
+          platforms: ${{ matrix.platforms }}
+          # Skip pushing when PR from a fork
+          push: ${{ !github.event.pull_request.head.repo.fork }}
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
+          cache-to: ${{ steps.cache-target.outputs.cache-to }}
+          tags: ${{ steps.metadata.outputs.tags }}
+          labels: ${{ steps.metadata.outputs.labels }}
+          build-args: |
+            DEVICE=${{ matrix.device }}
+            BUILD_ID=${{ github.run_id }}
+            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
+            BUILD_SOURCE_REF=${{ github.ref_name }}
+            BUILD_SOURCE_COMMIT=${{ github.sha }}
  success-check-server:
    name: Docker Build & Push Server Success
-    needs: [server, retag_server]
+    needs: [build_and_push_server, retag_server]
-    permissions: {}
    runs-on: ubuntu-latest
    if: always()
    steps:
-      - uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
-        with:
-          needs: ${{ toJSON(needs) }}
+      - name: Any jobs failed?
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+      - name: All jobs passed or skipped
+        if: ${{ !(contains(needs.*.result, 'failure')) }}
+        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
  success-check-ml:
    name: Docker Build & Push ML Success
-    needs: [machine-learning, retag_ml]
+    needs: [build_and_push_ml, retag_ml]
-    permissions: {}
    runs-on: ubuntu-latest
    if: always()
    steps:
-      - uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
-        with:
-          needs: ${{ toJSON(needs) }}
+      - name: Any jobs failed?
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+      - name: All jobs passed or skipped
+        if: ${{ !(contains(needs.*.result, 'failure')) }}
+        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
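
Both variants of the re-tag jobs reduce to the same primitive: docker buildx imagetools create copies an existing (possibly multi-arch) manifest to a new tag entirely registry-side, with no pull or rebuild. A hand-run equivalent, assuming a prior docker login to ghcr.io and a hypothetical PR number:

    docker buildx imagetools create \
      -t ghcr.io/immich-app/immich-machine-learning:pr-1234 \
      ghcr.io/immich-app/immich-machine-learning:main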


@@ -3,6 +3,7 @@ on:
   push:
     branches: [main]
   pull_request:
+    branches: [main]
   release:
     types: [published]
@@ -10,37 +11,27 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
      - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@v3
        with:
          filters: |
            docs:
              - 'docs/**'
-            workflow:
-              - '.github/workflows/docs-build.yml'
      - name: Check if we should force jobs to run
        id: should_force
-        run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'release' || github.ref_name == 'main' }}" >> "$GITHUB_OUTPUT"
+        run: echo "should_force=${{ github.event_name == 'release' || github.ref_name == 'main' }}" >> "$GITHUB_OUTPUT"
  build:
    name: Docs Build
    needs: pre-job
-    permissions:
-      contents: read
    if: ${{ needs.pre-job.outputs.should_run == 'true' }}
    runs-on: ubuntu-latest
    defaults:
@@ -49,12 +40,10 @@ jobs:
    steps:
      - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
      - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@v4
        with:
          node-version-file: './docs/.nvmrc'
@@ -68,9 +57,8 @@ jobs:
        run: npm run build
      - name: Upload build output
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
        with:
          name: docs-build-output
          path: docs/build/
-          include-hidden-files: true
          retention-days: 1


@@ -1,7 +1,7 @@
 name: Docs deploy
 on:
-  workflow_run: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
+  workflow_run:
-    workflows: ['Docs build']
+    workflows: ["Docs build"]
     types:
       - completed
@@ -9,9 +9,6 @@ jobs:
   checks:
     name: Docs Deploy Checks
     runs-on: ubuntu-latest
-    permissions:
-      actions: read
-      pull-requests: read
     outputs:
       parameters: ${{ steps.parameters.outputs.result }}
       artifact: ${{ steps.get-artifact.outputs.result }}
@@ -20,7 +17,7 @@ jobs:
         run: echo 'The triggering workflow did not succeed' && exit 1
       - name: Get artifact
         id: get-artifact
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+        uses: actions/github-script@v7
        with:
          script: |
            let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
@@ -38,9 +35,7 @@ jobs:
            return { found: true, id: matchArtifact.id };
      - name: Determine deploy parameters
        id: parameters
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        env:
-          HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
+        uses: actions/github-script@v7
        with:
          script: |
            const eventType = context.payload.workflow_run.event;
@@ -62,8 +57,7 @@ jobs:
            } else if (eventType == "pull_request") {
              let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
              if(!pull_number) {
-                const {HEAD_SHA} = process.env;
-                const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
+                const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
                const items = response.data.items
                if (items.length < 1) {
                  throw new Error("No pull request found for the commit")
@@ -101,36 +95,30 @@ jobs:
    name: Docs Deploy
    runs-on: ubuntu-latest
    needs: checks
-    permissions:
-      contents: read
-      actions: read
-      pull-requests: write
    if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
    steps:
      - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
      - name: Load parameters
        id: parameters
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        env:
-          PARAM_JSON: ${{ needs.checks.outputs.parameters }}
+        uses: actions/github-script@v7
        with:
          script: |
-            const parameters = JSON.parse(process.env.PARAM_JSON);
+            const json = `${{ needs.checks.outputs.parameters }}`;
+            const parameters = JSON.parse(json);
            core.setOutput("event", parameters.event);
            core.setOutput("name", parameters.name);
            core.setOutput("shouldDeploy", parameters.shouldDeploy);
+      - run: |
+          echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"
      - name: Download artifact
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        env:
-          ARTIFACT_JSON: ${{ needs.checks.outputs.artifact }}
+        uses: actions/github-script@v7
        with:
          script: |
-            let artifact = JSON.parse(process.env.ARTIFACT_JSON);
+            let artifact = ${{ needs.checks.outputs.artifact }};
            let download = await github.rest.actions.downloadArtifact({
              owner: context.repo.owner,
              repo: context.repo.repo,
@@ -150,12 +138,12 @@ jobs:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
+        uses: gruntwork-io/terragrunt-action@v2
        with:
-          tg_version: '0.58.12'
-          tofu_version: '1.7.1'
-          tg_dir: 'deployment/modules/cloudflare/docs'
-          tg_command: 'apply'
+          tg_version: "0.58.12"
+          tofu_version: "1.7.1"
+          tg_dir: "deployment/modules/cloudflare/docs"
+          tg_command: "apply"
      - name: Deploy Docs Subdomain Output
        id: docs-output
@@ -165,30 +153,27 @@ jobs:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
+        uses: gruntwork-io/terragrunt-action@v2
        with:
-          tg_version: '0.58.12'
-          tofu_version: '1.7.1'
-          tg_dir: 'deployment/modules/cloudflare/docs'
-          tg_command: 'output -json'
+          tg_version: "0.58.12"
+          tofu_version: "1.7.1"
+          tg_dir: "deployment/modules/cloudflare/docs"
+          tg_command: "output -json"
      - name: Output Cleaning
        id: clean
-        env:
-          TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
        run: |
-          CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
-          echo "output=$CLEANED" >> $GITHUB_OUTPUT
+          TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
+          echo "output=$TG_OUT" >> $GITHUB_OUTPUT
      - name: Publish to Cloudflare Pages
-        # TODO: Action is deprecated
-        uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1.5.0
+        uses: cloudflare/pages-action@v1
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
-          workingDirectory: 'docs'
-          directory: 'build'
+          workingDirectory: "docs"
+          directory: "build"
          branch: ${{ steps.parameters.outputs.name }}
          wranglerVersion: '3'
@@ -199,7 +184,7 @@ jobs:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
+        uses: gruntwork-io/terragrunt-action@v2
        with:
          tg_version: '0.58.12'
          tofu_version: '1.7.1'
@@ -207,7 +192,7 @@ jobs:
          tg_command: 'apply'
      - name: Comment
-        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
+        uses: actions-cool/maintain-one-comment@v3
        if: ${{ steps.parameters.outputs.event == 'pr' }}
with: with:
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }} number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
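A note on the hunk above: the hardened side never interpolates ${{ ... }} expressions directly into a github-script body; values travel through env and are read back from process.env, so attacker-influenced output cannot be spliced into the JavaScript. A minimal sketch of the same pattern (step and variable names are illustrative):

- name: Load parameters
  uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
  env:
    # the runner expands the expression into an env var, never into code
    PARAM_JSON: ${{ needs.checks.outputs.parameters }}
  with:
    script: |
      // read the value from the environment instead of templating it in
      const parameters = JSON.parse(process.env.PARAM_JSON);
      core.setOutput('event', parameters.event);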

View File

@@ -1,39 +1,32 @@
name: Docs destroy name: Docs destroy
on: on:
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here pull_request_target:
types: [closed] types: [closed]
permissions: {}
jobs: jobs:
deploy: deploy:
name: Docs Destroy name: Docs Destroy
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Destroy Docs Subdomain - name: Destroy Docs Subdomain
env: env:
TF_VAR_prefix_name: 'pr-${{ github.event.number }}' TF_VAR_prefix_name: "pr-${{ github.event.number }}"
TF_VAR_prefix_event_type: 'pr' TF_VAR_prefix_event_type: "pr"
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }} TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8 uses: gruntwork-io/terragrunt-action@v2
with: with:
tg_version: '0.58.12' tg_version: "0.58.12"
tofu_version: '1.7.1' tofu_version: "1.7.1"
tg_dir: 'deployment/modules/cloudflare/docs' tg_dir: "deployment/modules/cloudflare/docs"
tg_command: 'destroy -refresh=false' tg_command: "destroy -refresh=false"
- name: Comment - name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0 uses: actions-cool/maintain-one-comment@v3
with: with:
number: ${{ github.event.number }} number: ${{ github.event.number }}
delete: true delete: true
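Both workflows above also pin every third-party action to a full commit SHA, keeping the human-readable release as a trailing comment; a mutable tag such as @v2 can be repointed after review, a 40-character SHA cannot. A minimal sketch using pins taken from this diff:

steps:
  # immutable pin; the comment records which release the SHA corresponds to
  - name: Checkout code
    uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
    with:
      persist-credentials: false # don't leave the token in .git/config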

View File

@@ -4,32 +4,28 @@ on:
pull_request: pull_request:
types: [labeled] types: [labeled]
permissions: {}
jobs: jobs:
fix-formatting: fix-formatting:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: ${{ github.event.label.name == 'fix:formatting' }} if: ${{ github.event.label.name == 'fix:formatting' }}
permissions: permissions:
contents: write
pull-requests: write pull-requests: write
steps: steps:
- name: Generate a token - name: Generate a token
id: generate-token id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6 uses: actions/create-github-app-token@v1
with: with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }} app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }} private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: 'Checkout' - name: 'Checkout'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with: with:
ref: ${{ github.event.pull_request.head.ref }} ref: ${{ github.event.pull_request.head.ref }}
token: ${{ steps.generate-token.outputs.token }} token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './server/.nvmrc' node-version-file: './server/.nvmrc'
@@ -37,13 +33,13 @@ jobs:
run: make install-all && make format-all run: make install-all && make format-all
- name: Commit and push - name: Commit and push
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4 uses: EndBug/add-and-commit@v9
with: with:
default_author: github_actions default_author: github_actions
message: 'chore: fix formatting' message: 'chore: fix formatting'
- name: Remove label - name: Remove label
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 uses: actions/github-script@v7
if: always() if: always()
with: with:
script: | script: |
@@ -53,3 +49,4 @@ jobs:
repo: context.repo.repo, repo: context.repo.repo,
name: 'fix:formatting' name: 'fix:formatting'
}) })
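The formatting job pushes with a short-lived GitHub App installation token instead of the default GITHUB_TOKEN, which lets the workflow itself keep minimal permissions while the resulting commit can still trigger follow-up workflows. A condensed sketch of the sequence, with the secret names used in this diff:

- name: Generate a token
  id: generate-token
  uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
  with:
    app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
    private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
  uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
  with:
    ref: ${{ github.event.pull_request.head.ref }}
    token: ${{ steps.generate-token.outputs.token }}
    persist-credentials: true # kept so the later commit step can push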

View File

@@ -1,11 +1,9 @@
name: PR Label Validation name: PR Label Validation
on: on:
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here pull_request_target:
types: [opened, labeled, unlabeled, synchronize] types: [opened, labeled, unlabeled, synchronize]
permissions: {}
jobs: jobs:
validate-release-label: validate-release-label:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@@ -14,11 +12,11 @@ jobs:
pull-requests: write pull-requests: write
steps: steps:
- name: Require PR to have a changelog label - name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@fb29a14a076b0f74099f6198f77750e8fc236016 # v5.5.0 uses: mheap/github-action-required-labels@v5
with: with:
mode: exactly mode: exactly
count: 1 count: 1
use_regex: true use_regex: true
labels: 'changelog:.*' labels: "changelog:.*"
add_comment: true add_comment: true
message: 'Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label.' message: "Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label."

View File

@@ -1,8 +1,6 @@
name: 'Pull Request Labeler' name: "Pull Request Labeler"
on: on:
- pull_request_target # zizmor: ignore[dangerous-triggers] no attacker inputs are used here - pull_request_target
permissions: {}
jobs: jobs:
labeler: labeler:
@@ -11,4 +9,4 @@ jobs:
pull-requests: write pull-requests: write
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0 - uses: actions/labeler@v5

View File

@@ -4,16 +4,12 @@ on:
pull_request: pull_request:
types: [opened, synchronize, reopened, edited] types: [opened, synchronize, reopened, edited]
permissions: {}
jobs: jobs:
validate-pr-title: validate-pr-title:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
pull-requests: write
steps: steps:
- name: PR Conventional Commit Validation - name: PR Conventional Commit Validation
uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0 uses: ytanikin/PRConventionalCommits@1.3.0
with: with:
task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]' task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]'
add_label: 'false' add_label: 'false'
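A change that repeats across these files is the least-privilege token setup: an empty permissions block at the workflow level, then explicit per-job grants, so each job's GITHUB_TOKEN starts with nothing and receives only what it uses. A minimal sketch of the shape (job name illustrative):

permissions: {} # workflow default: the GITHUB_TOKEN starts with no scopes

jobs:
  validate:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write # the only scope this job needs
    steps:
      - run: echo "running with a least-privilege token"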

View File

@@ -21,40 +21,35 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-root group: ${{ github.workflow }}-${{ github.ref }}-root
cancel-in-progress: true cancel-in-progress: true
permissions: {}
jobs: jobs:
bump_version: bump_version:
runs-on: ubuntu-latest runs-on: ubuntu-latest
outputs: outputs:
ref: ${{ steps.push-tag.outputs.commit_long_sha }} ref: ${{ steps.push-tag.outputs.commit_long_sha }}
permissions: {} # No job-level permissions are needed because it uses the app-token
steps: steps:
- name: Generate a token - name: Generate a token
id: generate-token id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6 uses: actions/create-github-app-token@v1
with: with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }} app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }} private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout - name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with: with:
token: ${{ steps.generate-token.outputs.token }} token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Install uv - name: Install Poetry
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2 run: pipx install poetry
- name: Bump version - name: Bump version
env: run: misc/release/pump-version.sh -s "${{ inputs.serverBump }}" -m "${{ inputs.mobileBump }}"
SERVER_BUMP: ${{ inputs.serverBump }}
MOBILE_BUMP: ${{ inputs.mobileBump }}
run: misc/release/pump-version.sh -s "${SERVER_BUMP}" -m "${MOBILE_BUMP}"
- name: Commit and tag - name: Commit and tag
id: push-tag id: push-tag
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4 uses: EndBug/add-and-commit@v9
with: with:
default_author: github_actions default_author: github_actions
message: 'chore: version ${{ env.IMMICH_VERSION }}' message: 'chore: version ${{ env.IMMICH_VERSION }}'
@@ -64,47 +59,37 @@ jobs:
build_mobile: build_mobile:
uses: ./.github/workflows/build-mobile.yml uses: ./.github/workflows/build-mobile.yml
needs: bump_version needs: bump_version
permissions: secrets: inherit
contents: read
secrets:
KEY_JKS: ${{ secrets.KEY_JKS }}
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
with: with:
ref: ${{ needs.bump_version.outputs.ref }} ref: ${{ needs.bump_version.outputs.ref }}
prepare_release: prepare_release:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: build_mobile needs: build_mobile
permissions:
actions: read # To download the app artifact
# No content permissions are needed because it uses the app-token
steps: steps:
- name: Generate a token - name: Generate a token
id: generate-token id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6 uses: actions/create-github-app-token@v1
with: with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }} app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }} private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout - name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with: with:
token: ${{ steps.generate-token.outputs.token }} token: ${{ steps.generate-token.outputs.token }}
persist-credentials: false
- name: Download APK - name: Download APK
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 uses: actions/download-artifact@v4
with: with:
name: release-apk-signed name: release-apk-signed
- name: Create draft release - name: Create draft release
uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2 uses: softprops/action-gh-release@v2
with: with:
draft: true draft: true
tag_name: ${{ env.IMMICH_VERSION }} tag_name: ${{ env.IMMICH_VERSION }}
token: ${{ steps.generate-token.outputs.token }}
generate_release_notes: true generate_release_notes: true
body_path: misc/release/notes.tmpl body_path: misc/release/notes.tmpl
files: | files: |
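Note the secrets handling in the hunk above: the reusable build-mobile call drops secrets: inherit for an explicit allow-list, so the called workflow receives only the four signing secrets rather than every repository secret. A sketch of the calling job, with names from the diff:

build_mobile:
  uses: ./.github/workflows/build-mobile.yml
  needs: bump_version
  permissions:
    contents: read
  secrets: # named allow-list instead of `secrets: inherit`
    KEY_JKS: ${{ secrets.KEY_JKS }}
    ALIAS: ${{ secrets.ALIAS }}
    ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
    ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
  with:
    ref: ${{ needs.bump_version.outputs.ref }}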

View File

@@ -1,35 +0,0 @@
name: Preview label
on:
pull_request:
types: [labeled, closed]
permissions: {}
jobs:
comment-status:
runs-on: ubuntu-latest
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'preview' }}
permissions:
pull-requests: write
steps:
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
with:
message-id: 'preview-status'
message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/'
remove-label:
runs-on: ubuntu-latest
if: ${{ github.event.action == 'closed' && contains(github.event.pull_request.labels.*.name, 'preview') }}
permissions:
pull-requests: write
steps:
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
github.rest.issues.removeLabel({
issue_number: context.payload.pull_request.number,
owner: context.repo.owner,
repo: context.repo.repo,
name: 'preview'
})
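The deleted workflow above is self-contained; if the preview flow returns, its cleanup half stands alone. A sketch distilled from the removed file:

name: Preview label cleanup
on:
  pull_request:
    types: [closed]
permissions: {}
jobs:
  remove-label:
    runs-on: ubuntu-latest
    # only PRs that actually carry the label need cleanup
    if: ${{ contains(github.event.pull_request.labels.*.name, 'preview') }}
    permissions:
      pull-requests: write
    steps:
      - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          script: |
            // strip the label so a reopened PR starts from a clean state
            await github.rest.issues.removeLabel({
              issue_number: context.payload.pull_request.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              name: 'preview',
            })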

View File

@@ -4,24 +4,20 @@ on:
release: release:
types: [published] types: [published]
permissions: {} permissions:
packages: write
jobs: jobs:
publish: publish:
name: Publish `@immich/sdk` name: Publish `@immich/sdk`
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./open-api/typescript-sdk working-directory: ./open-api/typescript-sdk
steps: steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: actions/checkout@v4
with:
persist-credentials: false
# Setup .npmrc file to publish to npm # Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 - uses: actions/setup-node@v4
with: with:
node-version-file: './open-api/typescript-sdk/.nvmrc' node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org' registry-url: 'https://registry.npmjs.org'
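The hunk ends before the publish step, but the usual counterpart to the setup shown here is setup-node's registry-url paired with NODE_AUTH_TOKEN on the publish command; registry-url writes an .npmrc that reads the token from that variable. A sketch under that assumption (the secret name NPM_TOKEN is illustrative, not taken from this diff):

- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
  with:
    node-version-file: './open-api/typescript-sdk/.nvmrc'
    registry-url: 'https://registry.npmjs.org' # generates the .npmrc for npm auth
- run: npm publish
  env:
    NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # hypothetical secret name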

View File

@@ -9,47 +9,37 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
permissions: {}
jobs: jobs:
pre-job: pre-job:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
outputs: outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- id: found_paths - id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 uses: dorny/paths-filter@v3
with: with:
filters: | filters: |
mobile: mobile:
- 'mobile/**' - 'mobile/**'
workflow:
- '.github/workflows/static_analysis.yml'
- name: Check if we should force jobs to run - name: Check if we should force jobs to run
id: should_force id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT" run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
mobile-dart-analyze: mobile-dart-analyze:
name: Run Dart Code Analysis name: Run Dart Code Analysis
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run == 'true' }} if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Flutter SDK - name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0 uses: subosito/flutter-action@v2
with: with:
channel: 'stable' channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml flutter-version-file: ./mobile/pubspec.yaml
@@ -58,44 +48,6 @@ jobs:
run: dart pub get run: dart pub get
working-directory: ./mobile working-directory: ./mobile
- name: Install DCM
run: |
sudo apt-get update
wget -qO- https://dcm.dev/pgp-key.public | sudo gpg --dearmor -o /usr/share/keyrings/dcm.gpg
echo 'deb [signed-by=/usr/share/keyrings/dcm.gpg arch=amd64] https://dcm.dev/debian stable main' | sudo tee /etc/apt/sources.list.d/dart_stable.list
sudo apt-get update
sudo apt-get install dcm
- name: Generate translation file
run: make translation
working-directory: ./mobile
- name: Run Build Runner
run: make build
working-directory: ./mobile
- name: Generate platform API
run: make pigeon
working-directory: ./mobile
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
with:
files: |
mobile/**/*.g.dart
mobile/**/*.gr.dart
mobile/**/*.drift.dart
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
echo "Changed files: ${CHANGED_FILES}"
exit 1
- name: Run dart analyze - name: Run dart analyze
run: dart analyze --fatal-infos run: dart analyze --fatal-infos
working-directory: ./mobile working-directory: ./mobile
@@ -108,33 +60,7 @@ jobs:
run: dart run custom_lint run: dart run custom_lint
working-directory: ./mobile working-directory: ./mobile
- name: Run DCM # Enable after riverpod generator migration is completed
run: dcm analyze lib # - name: Run dart custom lint
working-directory: ./mobile # run: dart run custom_lint
# working-directory: ./mobile
zizmor:
name: zizmor
runs-on: ubuntu-latest
permissions:
security-events: write
contents: read
actions: read
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- name: Run zizmor 🌈
run: uvx zizmor --format=sarif . > results.sarif
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
sarif_file: results.sarif
category: zizmor

View File

@@ -9,15 +9,10 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
permissions: {}
jobs: jobs:
pre-job: pre-job:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
outputs: outputs:
should_run_i18n: ${{ steps.found_paths.outputs.i18n == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
@@ -26,19 +21,13 @@ jobs:
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_e2e_web: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_e2e_web: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_e2e_server_cli: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.server == 'true' || steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_e2e_server_cli: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.server == 'true' || steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_.github: ${{ steps.found_paths.outputs['.github'] == 'true' || steps.should_force.outputs.should_force == 'true' }} # redundant to have should_force but if someone changes the trigger then this won't have to be changed
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- id: found_paths - id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 uses: dorny/paths-filter@v3
with: with:
filters: | filters: |
i18n:
- 'i18n/**'
web: web:
- 'web/**' - 'web/**'
- 'i18n/**' - 'i18n/**'
@@ -54,34 +43,26 @@ jobs:
- 'mobile/**' - 'mobile/**'
machine-learning: machine-learning:
- 'machine-learning/**' - 'machine-learning/**'
workflow:
- '.github/workflows/test.yml'
.github:
- '.github/**'
- name: Check if we should force jobs to run - name: Check if we should force jobs to run
id: should_force id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT" run: echo "should_force=${{ github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
server-unit-tests: server-unit-tests:
name: Test & Lint Server name: Test & Lint Server
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }} if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./server working-directory: ./server
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './server/.nvmrc' node-version-file: './server/.nvmrc'
@@ -101,7 +82,7 @@ jobs:
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run small tests & coverage - name: Run small tests & coverage
run: npm test run: npm run test:cov
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
cli-unit-tests: cli-unit-tests:
@@ -109,20 +90,16 @@ jobs:
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }} if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./cli working-directory: ./cli
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './cli/.nvmrc' node-version-file: './cli/.nvmrc'
@@ -146,7 +123,7 @@ jobs:
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run unit tests & coverage - name: Run unit tests & coverage
run: npm run test run: npm run test:cov
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
cli-unit-tests-win: cli-unit-tests-win:
@@ -154,20 +131,16 @@ jobs:
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }} if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: windows-latest runs-on: windows-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./cli working-directory: ./cli
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './cli/.nvmrc' node-version-file: './cli/.nvmrc'
@@ -184,28 +157,24 @@ jobs:
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run unit tests & coverage - name: Run unit tests & coverage
run: npm run test run: npm run test:cov
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
web-lint: web-unit-tests:
name: Lint Web name: Test & Lint Web
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }} if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
runs-on: mich runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./web working-directory: ./web
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './web/.nvmrc' node-version-file: './web/.nvmrc'
@@ -217,7 +186,7 @@ jobs:
run: npm ci run: npm ci
- name: Run linter - name: Run linter
run: npm run lint:p run: npm run lint
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run formatter - name: Run formatter
@@ -228,102 +197,29 @@ jobs:
run: npm run check:svelte run: npm run check:svelte
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
web-unit-tests:
name: Test Web
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
- name: Run setup typescript-sdk
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
- name: Run npm install
run: npm ci
- name: Run tsc - name: Run tsc
run: npm run check:typescript run: npm run check:typescript
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run unit tests & coverage - name: Run unit tests & coverage
run: npm run test run: npm run test:cov
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
i18n-tests:
name: Test i18n
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_i18n == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
- name: Install dependencies
run: npm --prefix=web ci
- name: Format
run: npm --prefix=web run format:i18n
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
with:
files: |
i18n/**
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: i18n files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
exit 1
e2e-tests-lint: e2e-tests-lint:
name: End-to-End Lint name: End-to-End Lint
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }} if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./e2e working-directory: ./e2e
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './e2e/.nvmrc' node-version-file: './e2e/.nvmrc'
@@ -348,58 +244,43 @@ jobs:
run: npm run check run: npm run check
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
server-medium-tests: medium-tests-server:
name: Medium Tests (Server) name: Medium Tests (Server)
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }} if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./server
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with: with:
persist-credentials: false submodules: 'recursive'
- name: Setup Node - name: Production build
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 if: ${{ !cancelled() }}
with: run: docker compose -f e2e/docker-compose.yml build
node-version-file: './server/.nvmrc'
- name: Run npm install
run: npm ci
- name: Run medium tests - name: Run medium tests
run: npm run test:medium
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
run: make test-medium
e2e-tests-server-cli: e2e-tests-server-cli:
name: End-to-End Tests (Server & CLI) name: End-to-End Tests (Server & CLI)
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }} if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
runs-on: ${{ matrix.runner }} runs-on: mich
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./e2e working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with: with:
persist-credentials: false
submodules: 'recursive' submodules: 'recursive'
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './e2e/.nvmrc' node-version-file: './e2e/.nvmrc'
@@ -429,25 +310,19 @@ jobs:
name: End-to-End Tests (Web) name: End-to-End Tests (Web)
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }} if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
runs-on: ${{ matrix.runner }} runs-on: mich
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./e2e working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with: with:
persist-credentials: false
submodules: 'recursive' submodules: 'recursive'
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './e2e/.nvmrc' node-version-file: './e2e/.nvmrc'
@@ -472,39 +347,18 @@ jobs:
run: npx playwright test run: npx playwright test
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
success-check-e2e:
name: End-to-End Tests Success
needs: [e2e-tests-server-cli, e2e-tests-web]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
with:
needs: ${{ toJSON(needs) }}
mobile-unit-tests: mobile-unit-tests:
name: Unit Test Mobile name: Unit Test Mobile
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }} if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Flutter SDK - name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0 uses: subosito/flutter-action@v2
with: with:
channel: 'stable' channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml flutter-version-file: ./mobile/pubspec.yaml
- name: Generate translation file
run: make translation
working-directory: ./mobile
- name: Run tests - name: Run tests
working-directory: ./mobile working-directory: ./mobile
run: flutter test -j 1 run: flutter test -j 1
@@ -514,99 +368,55 @@ jobs:
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }} if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./machine-learning working-directory: ./machine-learning
steps: steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry
- uses: actions/setup-python@v5
with: with:
persist-credentials: false python-version: 3.11
cache: 'poetry'
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
# with:
# python-version: 3.11
# cache: 'uv'
- name: Install dependencies - name: Install dependencies
run: | run: |
uv sync --extra cpu poetry install --with dev --with cpu
- name: Lint with ruff - name: Lint with ruff
run: | run: |
uv run ruff check --output-format=github immich_ml poetry run ruff check --output-format=github app export
- name: Check black formatting - name: Check black formatting
run: | run: |
uv run black --check immich_ml poetry run black --check app export
- name: Run mypy type checking - name: Run mypy type checking
run: | run: |
uv run mypy --strict immich_ml/ poetry run mypy --install-types --non-interactive --strict app/
- name: Run tests and coverage - name: Run tests and coverage
run: | run: |
uv run pytest --cov=immich_ml --cov-report term-missing poetry run pytest app --cov=app --cov-report term-missing
github-files-formatting:
name: .github Files Formatting
needs: pre-job
if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./.github
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
- name: Run npm install
run: npm ci
- name: Run formatter
run: npm run format
if: ${{ !cancelled() }}
shellcheck: shellcheck:
name: ShellCheck name: ShellCheck
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Run ShellCheck - name: Run ShellCheck
uses: ludeeus/action-shellcheck@master uses: ludeeus/action-shellcheck@master
with: with:
ignore_paths: >- ignore_paths: >-
**/open-api/** **/open-api/**
**/openapi** **/openapi/**
**/node_modules/** **/node_modules/**
generated-api-up-to-date: generated-api-up-to-date:
name: OpenAPI Clients name: OpenAPI Clients
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './server/.nvmrc' node-version-file: './server/.nvmrc'
@@ -620,7 +430,7 @@ jobs:
run: make open-api run: make open-api
- name: Find file changes - name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4 uses: tj-actions/verify-changed-files@v20
id: verify-changed-files id: verify-changed-files
with: with:
files: | files: |
@@ -630,21 +440,17 @@ jobs:
- name: Verify files have not changed - name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true' if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: | run: |
echo "ERROR: Generated files not up to date!" echo "ERROR: Generated files not up to date!"
echo "Changed files: ${CHANGED_FILES}" echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
exit 1 exit 1
sql-schema-up-to-date: generated-typeorm-migrations-up-to-date:
name: SQL Schema Checks name: TypeORM Checks
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
services: services:
postgres: postgres:
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.1 image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env: env:
POSTGRES_PASSWORD: postgres POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres POSTGRES_USER: postgres
@@ -662,12 +468,10 @@ jobs:
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './server/.nvmrc' node-version-file: './server/.nvmrc'
@@ -678,29 +482,26 @@ jobs:
run: npm run build run: npm run build
- name: Run existing migrations - name: Run existing migrations
run: npm run migrations:run run: npm run typeorm:migrations:run
- name: Test npm run schema:reset command works - name: Test npm run schema:reset command works
run: npm run schema:reset run: npm run typeorm:schema:reset
- name: Generate new migrations - name: Generate new migrations
continue-on-error: true continue-on-error: true
run: npm run migrations:generate src/TestMigration run: npm run typeorm:migrations:generate ./src/migrations/TestMigration
- name: Find file changes - name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4 uses: tj-actions/verify-changed-files@v20
id: verify-changed-files id: verify-changed-files
with: with:
files: | files: |
server/src server/src/migrations/
- name: Verify migration files have not changed - name: Verify migration files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true' if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: | run: |
echo "ERROR: Generated migration files not up to date!" echo "ERROR: Generated migration files not up to date!"
echo "Changed files: ${CHANGED_FILES}" echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
cat ./src/*-TestMigration.ts
exit 1 exit 1
- name: Run SQL generation - name: Run SQL generation
@@ -709,7 +510,7 @@ jobs:
DB_URL: postgres://postgres:postgres@localhost:5432/immich DB_URL: postgres://postgres:postgres@localhost:5432/immich
- name: Find file changes - name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4 uses: tj-actions/verify-changed-files@v20
id: verify-changed-sql-files id: verify-changed-sql-files
with: with:
files: | files: |
@@ -717,12 +518,9 @@ jobs:
- name: Verify SQL files have not changed - name: Verify SQL files have not changed
if: steps.verify-changed-sql-files.outputs.files_changed == 'true' if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
run: | run: |
echo "ERROR: Generated SQL files not up to date!" echo "ERROR: Generated SQL files not up to date!"
echo "Changed files: ${CHANGED_FILES}" echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
git diff
exit 1 exit 1
# mobile-integration-tests: # mobile-integration-tests:
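Several jobs in this file converge on one guard for generated artifacts: regenerate, list changes with verify-changed-files, and fail with the file list passed through env rather than interpolated inline. The distilled pattern, with the pin and step ids from the diff:

- name: Find file changes
  uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
  id: verify-changed-files
  with:
    files: |
      server/src
- name: Verify files have not changed
  if: steps.verify-changed-files.outputs.files_changed == 'true'
  env:
    CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
  run: |
    echo "ERROR: Generated files not up to date!"
    echo "Changed files: ${CHANGED_FILES}"
    exit 1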

View File

@@ -1,57 +0,0 @@
name: Weblate checks
on:
pull_request:
branches: [main]
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
with:
filters: |
i18n:
- 'i18n/!(en)**\.json'
enforce-lock:
name: Check Weblate Lock
needs: [pre-job]
runs-on: ubuntu-latest
permissions: {}
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
steps:
- name: Check weblate lock
run: |
if [[ "false" = $(curl https://hosted.weblate.org/api/components/immich/immich/lock/ | jq .locked) ]]; then
exit 1
fi
- name: Find Pull Request
uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1.9.0
id: find-pr
with:
branch: chore/translations
- name: Fail if existing weblate PR
if: ${{ steps.find-pr.outputs.number }}
run: exit 1
success-check-lock:
name: Weblate Lock Check Success
needs: [enforce-lock]
runs-on: ubuntu-latest
permissions: {}
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
with:
needs: ${{ toJSON(needs) }}
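Both success-check jobs (here and in test.yml) share a fan-in shape: depend on every job that must pass, run with if: always() so failed or skipped dependencies still reach the check, and hand toJSON(needs) to a shared action. A generic sketch of the same idea without the devtools action (job names are illustrative):

success-check:
  needs: [job-a, job-b] # hypothetical upstream jobs
  runs-on: ubuntu-latest
  if: always() # run even when a dependency failed or was skipped
  permissions: {}
  steps:
    - env:
        NEEDS_JSON: ${{ toJSON(needs) }}
      run: |
        # fail unless every needed job reports success
        echo "$NEEDS_JSON" | jq -e 'all(.[]; .result == "success")' > /dev/null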

1
.gitignore vendored
View File

@@ -3,7 +3,6 @@
.DS_Store .DS_Store
.vscode/* .vscode/*
!.vscode/launch.json !.vscode/launch.json
!.vscode/extensions.json
.idea .idea
docker/upload docker/upload

View File

@@ -1,10 +0,0 @@
{
"recommendations": [
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"dbaeumer.vscode-eslint",
"dart-code.flutter",
"dart-code.dart-code",
"dcmdev.dcm-vscode-extension"
]
}

77
.vscode/settings.json vendored
View File

@@ -1,63 +1,44 @@
{ {
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.tabSize": 2,
"editor.formatOnSave": true
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.tabSize": 2,
"editor.formatOnSave": true
},
"[css]": { "[css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode", "editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true, "editor.tabSize": 2,
"editor.formatOnSave": true
},
"[svelte]": {
"editor.defaultFormatter": "svelte.svelte-vscode",
"editor.tabSize": 2 "editor.tabSize": 2
}, },
"svelte.enable-ts-plugin": true,
"eslint.validate": [
"javascript",
"svelte"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"[dart]": { "[dart]": {
"editor.defaultFormatter": "Dart-Code.dart-code",
"editor.formatOnSave": true, "editor.formatOnSave": true,
"editor.selectionHighlight": false, "editor.selectionHighlight": false,
"editor.suggest.snippetsPreventQuickSuggestions": false, "editor.suggest.snippetsPreventQuickSuggestions": false,
"editor.suggestSelection": "first", "editor.suggestSelection": "first",
"editor.tabCompletion": "onlySnippets", "editor.tabCompletion": "onlySnippets",
"editor.wordBasedSuggestions": "off" "editor.wordBasedSuggestions": "off",
"editor.defaultFormatter": "Dart-Code.dart-code"
}, },
"[javascript]": { "cSpell.words": [
"editor.codeActionsOnSave": { "immich"
"source.organizeImports": "explicit", ],
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[svelte]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "svelte.svelte-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[typescript]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"cSpell.words": ["immich"],
"editor.formatOnSave": true,
"eslint.validate": ["javascript", "svelte"],
"explorer.fileNesting.enabled": true, "explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": { "explorer.fileNesting.patterns": {
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
"*.ts": "${capture}.spec.ts,${capture}.mock.ts" "*.ts": "${capture}.spec.ts,${capture}.mock.ts"
}, }
"svelte.enable-ts-plugin": true, }
"typescript.preferences.importModuleSpecifier": "non-relative"
}

72
.vscode/tasks.json vendored
View File

@@ -1,72 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Fix Permissions, Install Dependencies",
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start.sh ] && /immich-devcontainer/container-start.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich API Server (Nest)",
"dependsOn": ["Fix Permissions, Install Dependencies"],
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start-backend.sh ] && /immich-devcontainer/container-start-backend.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich Web Server (Vite)",
"dependsOn": ["Fix Permissions, Install Dependencies"],
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start-frontend.sh ] && /immich-devcontainer/container-start-frontend.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich Server and Web",
"dependsOn": ["Immich Web Server (Vite)", "Immich API Server (Nest)"],
"runOptions": {
"runOn": "folderOpen"
},
"problemMatcher": []
}
]
}

View File

@@ -17,9 +17,6 @@ e2e:
prod: prod:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
prod-down:
docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans
prod-scale: prod-scale:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
@@ -42,7 +39,7 @@ attach-server:
renovate: renovate:
LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset
MODULES = e2e server web cli sdk docs .github MODULES = e2e server web cli sdk docs
audit-%: audit-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
@@ -80,14 +77,14 @@ test-medium:
test-medium-dev: test-medium-dev:
docker exec -it immich_server /bin/sh -c "npm run test:medium" docker exec -it immich_server /bin/sh -c "npm run test:medium"
build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ; build-all: $(foreach M,$(filter-out e2e,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ; install-all: $(foreach M,$(MODULES),install-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ; check-all: $(foreach M,$(filter-out sdk cli docs,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ; lint-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),lint-$M) ;
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ; format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
audit-all: $(foreach M,$(MODULES),audit-$M) ; audit-all: $(foreach M,$(MODULES),audit-$M) ;
hygiene-all: lint-all format-all check-all sql audit-all; hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ; test-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),test-$M) ;
clean: clean:
find . -name "node_modules" -type d -prune -exec rm -rf '{}' + find . -name "node_modules" -type d -prune -exec rm -rf '{}' +

View File

@@ -1,11 +1,11 @@
<p align="center"> <p align="center">
<br/> <br/>
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a> <a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
<a href="https://discord.immich.app"> <a href="https://discord.immich.app">
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/> <img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
</a> </a>
<br/> <br/>
<br/> <br/>
</p> </p>
<p align="center"> <p align="center">
@@ -61,7 +61,9 @@
## Demo ## Demo
Access the demo [here](https://demo.immich.app). For the mobile app, you can use `https://demo.immich.app` for the `Server Endpoint URL`. Access the demo [here](https://demo.immich.app). The demo is running on a Free-tier Oracle VM in Amsterdam with a 2.4Ghz quad-core ARM64 CPU and 24GB RAM.
For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`
### Login credentials ### Login credentials
@@ -102,7 +104,7 @@ Access the demo [here](https://demo.immich.app). For the mobile app, you can use
| Read-only gallery | Yes | Yes | | Read-only gallery | Yes | Yes |
| Stacked Photos | Yes | Yes | | Stacked Photos | Yes | Yes |
| Tags | No | Yes | | Tags | No | Yes |
| Folder View | Yes | Yes | | Folder View | No | Yes |
## Translations ## Translations

View File

@@ -1 +1 @@
22.16.0 22.13.1

View File

@@ -1,4 +1,4 @@
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core FROM node:22.13.1-alpine3.20@sha256:c52e20859a92b3eccbd3a36c5e1a90adc20617d8d421d65e8a622e87b5dac963 AS core
WORKDIR /usr/src/open-api/typescript-sdk WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./ COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./

View File

@@ -1,29 +1,39 @@
import { FlatCompat } from '@eslint/eslintrc';
import js from '@eslint/js'; import js from '@eslint/js';
import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended'; import typescriptEslint from '@typescript-eslint/eslint-plugin';
import eslintPluginUnicorn from 'eslint-plugin-unicorn'; import tsParser from '@typescript-eslint/parser';
import globals from 'globals'; import globals from 'globals';
import path from 'node:path'; import path from 'node:path';
import { fileURLToPath } from 'node:url'; import { fileURLToPath } from 'node:url';
import typescriptEslint from 'typescript-eslint';
const __filename = fileURLToPath(import.meta.url); const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename); const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
baseDirectory: __dirname,
recommendedConfig: js.configs.recommended,
allConfig: js.configs.all,
});
export default typescriptEslint.config([ export default [
eslintPluginUnicorn.configs.recommended,
eslintPluginPrettierRecommended,
js.configs.recommended,
typescriptEslint.configs.recommended,
{ {
ignores: ['eslint.config.mjs', 'dist'], ignores: ['eslint.config.mjs', 'dist'],
}, },
...compat.extends(
'plugin:@typescript-eslint/recommended',
'plugin:prettier/recommended',
'plugin:unicorn/recommended',
),
{ {
plugins: {
'@typescript-eslint': typescriptEslint,
},
languageOptions: { languageOptions: {
globals: { globals: {
...globals.node, ...globals.node,
}, },
parser: typescriptEslint.parser, parser: tsParser,
ecmaVersion: 5, ecmaVersion: 5,
sourceType: 'module', sourceType: 'module',
@@ -48,4 +58,4 @@ export default typescriptEslint.config([
'object-shorthand': ['error', 'always'], 'object-shorthand': ['error', 'always'],
}, },
}, },
]); ];

2233
cli/package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{ {
"name": "@immich/cli", "name": "@immich/cli",
"version": "2.2.70", "version": "2.2.47",
"description": "Command Line Interface (CLI) for Immich", "description": "Command Line Interface (CLI) for Immich",
"type": "module", "type": "module",
"exports": "./dist/index.js", "exports": "./dist/index.js",
@@ -19,9 +19,10 @@
"@types/byte-size": "^8.1.0", "@types/byte-size": "^8.1.0",
"@types/cli-progress": "^3.11.0", "@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12", "@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1", "@types/mock-fs": "^4.13.1",
"@types/node": "^22.15.31", "@types/node": "^22.10.9",
"@typescript-eslint/eslint-plugin": "^8.15.0",
"@typescript-eslint/parser": "^8.15.0",
"@vitest/coverage-v8": "^3.0.0", "@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0", "byte-size": "^9.0.0",
"cli-progress": "^3.12.0", "cli-progress": "^3.12.0",
@@ -29,13 +30,12 @@
"eslint": "^9.14.0", "eslint": "^9.14.0",
"eslint-config-prettier": "^10.0.0", "eslint-config-prettier": "^10.0.0",
"eslint-plugin-prettier": "^5.1.3", "eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^59.0.0", "eslint-plugin-unicorn": "^56.0.1",
"globals": "^16.0.0", "globals": "^15.9.0",
"mock-fs": "^5.2.0", "mock-fs": "^5.2.0",
"prettier": "^3.2.5", "prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0", "prettier-plugin-organize-imports": "^4.0.0",
"typescript": "^5.3.3", "typescript": "^5.3.3",
"typescript-eslint": "^8.28.0",
"vite": "^6.0.0", "vite": "^6.0.0",
"vite-tsconfig-paths": "^5.0.0", "vite-tsconfig-paths": "^5.0.0",
"vitest": "^3.0.0", "vitest": "^3.0.0",
@@ -62,13 +62,11 @@
"node": ">=20.0.0" "node": ">=20.0.0"
}, },
"dependencies": { "dependencies": {
"chokidar": "^4.0.3",
"fast-glob": "^3.3.2", "fast-glob": "^3.3.2",
"fastq": "^1.17.1", "fastq": "^1.17.1",
"lodash-es": "^4.17.21", "lodash-es": "^4.17.21"
"micromatch": "^4.0.8"
}, },
"volta": { "volta": {
"node": "22.16.0" "node": "22.13.1"
} }
} }

View File

@@ -1,13 +1,12 @@
import * as fs from 'node:fs'; import * as fs from 'node:fs';
import * as os from 'node:os'; import * as os from 'node:os';
import * as path from 'node:path'; import * as path from 'node:path';
import { setTimeout as sleep } from 'node:timers/promises'; import { describe, expect, it, vi } from 'vitest';
import { describe, expect, it, MockedFunction, vi } from 'vitest';
import { Action, checkBulkUpload, defaults, getSupportedMediaTypes, Reason } from '@immich/sdk'; import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
import createFetchMock from 'vitest-fetch-mock'; import createFetchMock from 'vitest-fetch-mock';
import { checkForDuplicates, getAlbumName, startWatch, uploadFiles, UploadOptionsDto } from 'src/commands/asset'; import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';
vi.mock('@immich/sdk'); vi.mock('@immich/sdk');
@@ -200,112 +199,3 @@ describe('checkForDuplicates', () => {
}); });
}); });
}); });
describe('startWatch', () => {
let testFolder: string;
let checkBulkUploadMocked: MockedFunction<typeof checkBulkUpload>;
beforeEach(async () => {
vi.restoreAllMocks();
vi.mocked(getSupportedMediaTypes).mockResolvedValue({
image: ['.jpg'],
sidecar: ['.xmp'],
video: ['.mp4'],
});
testFolder = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'test-startWatch-'));
checkBulkUploadMocked = vi.mocked(checkBulkUpload);
checkBulkUploadMocked.mockResolvedValue({
results: [],
});
});
it('should start watching a directory and upload new files', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // to debounce the watcher from considering the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: [
expect.objectContaining({
id: testFilePath,
}),
],
},
});
});
it('should filter out unsupported files', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const unsupportedFilePath = path.join(testFolder, 'test.txt');
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // to debounce the watcher from considering the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await fs.promises.writeFile(unsupportedFilePath, 'testtxt');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: testFilePath,
}),
]),
},
});
expect(checkBulkUpload).not.toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: unsupportedFilePath,
}),
]),
},
});
});
it('should filter out ignored patterns', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const ignoredPattern = 'ignored';
const ignoredFolder = path.join(testFolder, ignoredPattern);
await fs.promises.mkdir(ignoredFolder, { recursive: true });
const ignoredFilePath = path.join(ignoredFolder, 'ignored.jpg');
await startWatch([testFolder], { concurrency: 1, ignore: ignoredPattern }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // to debounce the watcher from considering the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await fs.promises.writeFile(ignoredFilePath, 'ignoredjpg');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: testFilePath,
}),
]),
},
});
expect(checkBulkUpload).not.toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: ignoredFilePath,
}),
]),
},
});
});
afterEach(async () => {
await fs.promises.rm(testFolder, { recursive: true, force: true });
});
});

View File

@@ -12,18 +12,13 @@ import {
getSupportedMediaTypes, getSupportedMediaTypes,
} from '@immich/sdk'; } from '@immich/sdk';
import byteSize from 'byte-size'; import byteSize from 'byte-size';
import { Matcher, watch as watchFs } from 'chokidar';
import { MultiBar, Presets, SingleBar } from 'cli-progress'; import { MultiBar, Presets, SingleBar } from 'cli-progress';
import { chunk } from 'lodash-es'; import { chunk } from 'lodash-es';
import micromatch from 'micromatch';
import { Stats, createReadStream } from 'node:fs'; import { Stats, createReadStream } from 'node:fs';
import { stat, unlink } from 'node:fs/promises'; import { stat, unlink } from 'node:fs/promises';
import path, { basename } from 'node:path'; import path, { basename } from 'node:path';
import { Queue } from 'src/queue'; import { Queue } from 'src/queue';
import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils'; import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
const UPLOAD_WATCH_BATCH_SIZE = 100;
const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;
const s = (count: number) => (count === 1 ? '' : 's'); const s = (count: number) => (count === 1 ? '' : 's');
@@ -41,9 +36,6 @@ export interface UploadOptionsDto {
albumName?: string; albumName?: string;
includeHidden?: boolean; includeHidden?: boolean;
concurrency: number; concurrency: number;
progress?: boolean;
watch?: boolean;
jsonOutput?: boolean;
} }
class UploadFile extends File { class UploadFile extends File {
@@ -63,100 +55,19 @@ class UploadFile extends File {
} }
} }
const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
const { newFiles, duplicates } = await checkForDuplicates(files, options);
const newAssets = await uploadFiles(newFiles, options);
if (options.jsonOutput) {
console.log(JSON.stringify({ newFiles, duplicates, newAssets }, undefined, 4));
}
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(
newAssets.map(({ filepath }) => filepath),
options,
);
};
export const startWatch = async (
paths: string[],
options: UploadOptionsDto,
{
batchSize = UPLOAD_WATCH_BATCH_SIZE,
debounceTimeMs = UPLOAD_WATCH_DEBOUNCE_TIME_MS,
}: { batchSize?: number; debounceTimeMs?: number } = {},
) => {
const watcherIgnored: Matcher[] = [];
const { image, video } = await getSupportedMediaTypes();
const extensions = new Set([...image, ...video]);
if (options.ignore) {
watcherIgnored.push((path) => micromatch.contains(path, `**/${options.ignore}`));
}
const pathsBatcher = new Batcher<string>({
batchSize,
debounceTimeMs,
onBatch: async (paths: string[]) => {
const uniquePaths = [...new Set(paths)];
await uploadBatch(uniquePaths, options);
},
});
const onFile = async (path: string, stats?: Stats) => {
if (stats?.isDirectory()) {
return;
}
const ext = '.' + path.split('.').pop()?.toLowerCase();
if (!ext || !extensions.has(ext)) {
return;
}
if (!options.progress) {
// only log when progress bars are disabled, since logging interferes with the progress bar rendering
console.log(`Change detected: ${path}`);
}
pathsBatcher.add(path);
};
const fsWatcher = watchFs(paths, {
ignoreInitial: true,
ignored: watcherIgnored,
alwaysStat: true,
awaitWriteFinish: true,
depth: options.recursive ? undefined : 1,
persistent: true,
})
.on('add', onFile)
.on('change', onFile)
.on('error', (error) => console.error(`Watcher error: ${error}`));
process.on('SIGINT', async () => {
console.log('Exiting...');
await fsWatcher.close();
process.exit();
});
};
export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => { export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
await authenticate(baseOptions); await authenticate(baseOptions);
const scanFiles = await scan(paths, options); const scanFiles = await scan(paths, options);
if (scanFiles.length === 0) { if (scanFiles.length === 0) {
if (options.watch) { console.log('No files found, exiting');
console.log('No files found initially.'); return;
} else {
console.log('No files found, exiting');
return;
}
} }
if (options.watch) { const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
console.log('Watching for changes...'); const newAssets = await uploadFiles(newFiles, options);
await startWatch(paths, options); await updateAlbums([...newAssets, ...duplicates], options);
// watcher does not handle the initial scan await deleteFiles(newFiles, options);
// as the scan() is a more efficient quick start with batched results
}
await uploadBatch(scanFiles, options);
}; };
const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => { const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
@@ -174,25 +85,19 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
return files; return files;
}; };
export const checkForDuplicates = async (files: string[], { concurrency, skipHash, progress }: UploadOptionsDto) => { export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
if (skipHash) { if (skipHash) {
console.log('Skipping hash check, assuming all files are new'); console.log('Skipping hash check, assuming all files are new');
return { newFiles: files, duplicates: [] }; return { newFiles: files, duplicates: [] };
} }
let multiBar: MultiBar | undefined; const multiBar = new MultiBar(
{ format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
if (progress) { const hashProgressBar = multiBar.create(files.length, 0, { message: 'Hashing files ' });
multiBar = new MultiBar( const checkProgressBar = multiBar.create(files.length, 0, { message: 'Checking for duplicates' });
{ format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
} else {
console.log(`Received ${files.length} files, hashing...`);
}
const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });
const newFiles: string[] = []; const newFiles: string[] = [];
const duplicates: Asset[] = []; const duplicates: Asset[] = [];
@@ -212,7 +117,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
} }
} }
checkProgressBar?.increment(assets.length); checkProgressBar.increment(assets.length);
}, },
{ concurrency, retry: 3 }, { concurrency, retry: 3 },
); );
@@ -232,7 +137,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
void checkBulkUploadQueue.push(batch); void checkBulkUploadQueue.push(batch);
} }
hashProgressBar?.increment(); hashProgressBar.increment();
return results; return results;
}, },
{ concurrency, retry: 3 }, { concurrency, retry: 3 },
@@ -250,7 +155,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
await checkBulkUploadQueue.drained(); await checkBulkUploadQueue.drained();
multiBar?.stop(); multiBar.stop();
console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`); console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
@@ -266,10 +171,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
return { newFiles, duplicates }; return { newFiles, duplicates };
}; };
export const uploadFiles = async ( export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
files: string[],
{ dryRun, concurrency, progress }: UploadOptionsDto,
): Promise<Asset[]> => {
if (files.length === 0) { if (files.length === 0) {
console.log('All assets were already uploaded, nothing to do.'); console.log('All assets were already uploaded, nothing to do.');
return []; return [];
@@ -289,20 +191,12 @@ export const uploadFiles = async (
return files.map((filepath) => ({ id: '', filepath })); return files.map((filepath) => ({ id: '', filepath }));
} }
let uploadProgress: SingleBar | undefined; const uploadProgress = new SingleBar(
{ format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
if (progress) { Presets.shades_classic,
uploadProgress = new SingleBar( );
{ uploadProgress.start(totalSize, 0);
format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}', uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
},
Presets.shades_classic,
);
} else {
console.log(`Uploading ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
}
uploadProgress?.start(totalSize, 0);
uploadProgress?.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
let duplicateCount = 0; let duplicateCount = 0;
let duplicateSize = 0; let duplicateSize = 0;
@@ -328,7 +222,7 @@ export const uploadFiles = async (
successSize += stats.size ?? 0; successSize += stats.size ?? 0;
} }
uploadProgress?.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) }); uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
return response; return response;
}, },
@@ -341,7 +235,7 @@ export const uploadFiles = async (
await queue.drained(); await queue.drained();
uploadProgress?.stop(); uploadProgress.stop();
console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`); console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
if (duplicateCount > 0) { if (duplicateCount > 0) {

View File

@@ -68,19 +68,7 @@ program
.env('IMMICH_UPLOAD_CONCURRENCY') .env('IMMICH_UPLOAD_CONCURRENCY')
.default(4), .default(4),
) )
.addOption(
new Option('-j, --json-output', 'Output detailed information in json format')
.env('IMMICH_JSON_OUTPUT')
.default(false),
)
.addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS')) .addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
.addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
.addOption(
new Option('--watch', 'Watch for changes and upload automatically')
.env('IMMICH_WATCH_CHANGES')
.default(false)
.implies({ progress: false }),
)
.argument('[paths...]', 'One or more paths to assets to be uploaded') .argument('[paths...]', 'One or more paths to assets to be uploaded')
.action((paths, options) => upload(paths, program.opts(), options)); .action((paths, options) => upload(paths, program.opts(), options));
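Taken together, the new `--watch`, `--json-output`, and `--no-progress` options make unattended runs scriptable. A usage sketch, assuming the flag and environment-variable names added above (the binary name and install method may differ on your system):

```bash
# Watch a folder and upload new media as it appears.
# --watch implies --no-progress, so plain log lines are printed instead of bars.
immich upload --watch --recursive /mnt/photos

# One-shot upload that prints a machine-readable summary to stdout.
immich upload --json-output --no-progress /mnt/photos > upload-result.json

# The same behavior can be driven through the environment instead of flags.
IMMICH_WATCH_CHANGES=true IMMICH_UPLOAD_CONCURRENCY=8 immich upload /mnt/photos
```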

View File

@@ -1,7 +1,6 @@
import mockfs from 'mock-fs'; import mockfs from 'mock-fs';
import { readFileSync } from 'node:fs'; import { readFileSync } from 'node:fs';
import { Batcher, CrawlOptions, crawl } from 'src/utils'; import { CrawlOptions, crawl } from 'src/utils';
import { Mock } from 'vitest';
interface Test { interface Test {
test: string; test: string;
@@ -304,38 +303,3 @@ describe('crawl', () => {
} }
}); });
}); });
describe('Batcher', () => {
let batcher: Batcher;
let onBatch: Mock;
beforeEach(() => {
onBatch = vi.fn();
batcher = new Batcher({ batchSize: 2, onBatch });
});
it('should trigger onBatch() when a batch limit is reached', async () => {
batcher.add('a');
batcher.add('b');
batcher.add('c');
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a', 'b']);
});
it('should trigger onBatch() when flush() is called', async () => {
batcher.add('a');
batcher.flush();
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a']);
});
it('should trigger onBatch() when debounce time reached', async () => {
vi.useFakeTimers();
batcher = new Batcher({ batchSize: 2, debounceTimeMs: 100, onBatch });
batcher.add('a');
expect(onBatch).not.toHaveBeenCalled();
vi.advanceTimersByTime(200);
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a']);
vi.useRealTimers();
});
});

View File

@@ -172,64 +172,3 @@ export const sha1 = (filepath: string) => {
rs.on('end', () => resolve(hash.digest('hex'))); rs.on('end', () => resolve(hash.digest('hex')));
}); });
}; };
/**
* Batches items and calls onBatch to process them
* when the batch size is reached or the debounce time has passed.
*/
export class Batcher<T = unknown> {
private items: T[] = [];
private readonly batchSize: number;
private readonly debounceTimeMs?: number;
private readonly onBatch: (items: T[]) => void;
private debounceTimer?: NodeJS.Timeout;
constructor({
batchSize,
debounceTimeMs,
onBatch,
}: {
batchSize: number;
debounceTimeMs?: number;
onBatch: (items: T[]) => Promise<void>;
}) {
this.batchSize = batchSize;
this.debounceTimeMs = debounceTimeMs;
this.onBatch = onBatch;
}
private setDebounceTimer() {
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
}
if (this.debounceTimeMs) {
this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
}
}
private clearDebounceTimer() {
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
this.debounceTimer = undefined;
}
}
add(item: T) {
this.items.push(item);
this.setDebounceTimer();
if (this.items.length >= this.batchSize) {
this.flush();
}
}
flush() {
this.clearDebounceTimer();
if (this.items.length === 0) {
return;
}
this.onBatch(this.items);
this.items = [];
}
}

View File

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates. # Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" { provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.52.0" version = "4.50.0"
constraints = "4.52.0" constraints = "4.50.0"
hashes = [ hashes = [
"h1:2BEJyXJtYC4B4nda/WCYUmuJYDaYk88F8t1pwPzr0iQ=", "h1:0qvD5ZKn2tMZ8cOjQrUSITIC9tKCZbrSaSswV9lOyiU=",
"h1:4IASk5SESeWKQ7JU0+M7KApuF5mZyklvwMXPBabim3c=", "h1:4N0gplrZ0zOsJv3Kx1VfIx2FwrZHbYU0Un2yfiLZIGQ=",
"h1:5ImZxxALSnWfH/4EXw/wFirSmk5Tr0ACmcysy51AafE=", "h1:81AMQq4kNKU/35U8ElQegUxG4E6xB0erIjG5xVmjIyo=",
"h1:6TJ3dxLSin4ZKBJLsZDn95H2ZYnGm8S7GGHvvXuuMQU=", "h1:EEQNADUmV3IL6x00yzy04i7OCSLeOMgM9XQkV3w71gA=",
"h1:IzTUjg9kQ4N3qizP9CjYLeHwjsuGgtxwXvfUQWyOLcA=", "h1:HD0KI7td6oiSSAnJNn8UPSGf+hKiTo4JVQYfAiU1SqM=",
"h1:NTaOQfYINA0YTG/V1/9+SYtgX1it63+cBugj4WK4FWc=", "h1:Hl+o5LtcvZg2f3l1hh9vaG/DFK6k+dTIZSeM0lXyfpo=",
"h1:PXH48LuJn329sCfMXprdMDk51EZaWFyajVvS03qhQLs=", "h1:ZUO2oIJ6jtZdvl816h0cEIiIeZ/fFCF64+abGEVxZZM=",
"h1:Pi5M+GeoMSN2eJ6QnIeXjBf19O+rby/74CfB2ocpv20=", "h1:Zio80fnEeUKdlSOhTVskMEFSLUQ6TMsMKnXc+Dy2P2A=",
"h1:ShXZ2ZjBvm3thfoPPzPT8+OhyismnydQVkUAfI8X12w=", "h1:aLLvg36evTyqjtXGV2MjAV8imktXFmry7p/xCu9GQC4=",
"h1:WQ9hu0Wge2msBbODfottCSKgu8oKUrw4Opz+fDPVVHk=", "h1:azL05eWyy2V8SWkbZZImPWvv8ynG4eqmrbZhjXBDFug=",
"h1:Z5yXML2DE0uH9UU+M0ut9JMQAORcwVZz1CxBHzeBmao=", "h1:ckMysHY4fJmr7o58XMi+DdgOTB/U/Mf1u1JA9ly3g/I=",
"h1:jqI2qKknpleS3JDSplyGYHMu0u9K/tor1ZOjFwDgEMk=", "h1:jxOwjDNjt5WCb4YjjiMsman91O8Y+MAPz6UwJ4a6F+0=",
"h1:kgfutDh14Q5nw4eg6qGFamFxIiY8Ae0FPKRBLDOzpcI=", "h1:u4OfnjSLa4Wk1IUFAzrvMnGgr8MvRHEWVDHEScPK2E8=",
"h1:zCAO7GZmfYhWb+i6TfqlqhMeDyPZWGio2IzEzAh3YTs=", "h1:wQkR1oeSkzlHn3rnVuLJRJLBHlg4EHt7Y64DeTjfkjQ=",
"zh:19be1a91c982b902c42aba47766860dfa5dc151eed1e95fd39ca642229381ef0", "zh:0ef99ed39472a94e6a0d6fa733cf0a46bce3bf66eba2873efae8846efdddc237",
"zh:1de451c4d1ecf7efbe67b6dace3426ba810711afdd644b0f1b870364c8ae91f8", "zh:2929cbbffcead171d45c88e4a7a59e9c013ea775dafa68b10da8db7cd04b6140",
"zh:352b4a2120173298622e669258744554339d959ac3a95607b117a48ee4a83238", "zh:462601c87118088e1a718842e367af7d8e7620598d426980a6d6b33de759865e",
"zh:3c6f1346d9154afbd2d558fabb4b0150fc8d559aa961254144fe1bc17fe6032f", "zh:56766eb62a74a9d88d9efb8486dd3a0c5c9db873d0a980ae9ef1e8af27d74231",
"zh:4c4c92d53fb535b1e0eff26f222bbd627b97d3b4c891ec9c321268676d06152f", "zh:6b4e8810d99498a5a20a5872982a0f1354e79cfc4a7dfe7cc656f1c7eaae47d8",
"zh:53276f68006c9ceb7cdb10a6ccf91a5c1eadd1407a28edb5741e84e88d7e29e8", "zh:6d65bdb4ec94b6eecc8abe26d94e2ca09262dc1e7a9934db829f418be0119920",
"zh:7925a97773948171a63d4f65bb81ee92fd6d07a447e36012977313293a5435c9", "zh:71adeaf31e41a358ec6095004062e43f56ee7d4b2504e5613ab351d511695641",
"zh:7dfb0a4496cfe032437386d0a2cd9229a1956e9c30bd920923c141b0f0440060",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f", "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8d4aa79f0a414bb4163d771063c70cd991c8fac6c766e685bac2ee12903c5bd6", "zh:89761c15908ccc2cf9c50bb5cb3be45d3ad0c45fc7c608c6b95f48c0288b7160",
"zh:a67540c13565616a7e7e51ee9366e88b0dc60046e1d75c72680e150bd02725bb", "zh:8cc5d7c5939da89cfd01f3e51c84f3576564783acea9db86bd9e32049805ed96",
"zh:a936383a4767f5393f38f622e92bf2d0c03fe04b69c284951f27345766c7b31b", "zh:987cff8225b1dd436cdcb4fc6228689ae7e4281de6896412a2a9a3325c49f05e",
"zh:d4887d73c466ff036eecf50ad6404ba38fd82ea4855296b1846d244b0f13c380", "zh:991e83ebb89867d71e01a1c215ed159efb425683b0a44707be8579eb0a337f06",
"zh:e9093c8bd5b6cd99c81666e315197791781b8f93afa14fc2e0f732d1bb2a44b7", "zh:ab8177ae2d8f5cfa90043a6f867435012cae115f6061b832a7e2462e0ae87a67",
"zh:efd3b3f1ec59a37f635aa1d4efcf178734c2fcf8ddb0d56ea690bec342da8672", "zh:d1ca34df1398f201274a6a18102975148c10ca15aa43cfc56cc9897620929509",
"zh:d34946f70201baf6dda03e3b294c6bbe40d95d0278e97b9f636ded94822b24ac",
] ]
} }

View File

@@ -5,7 +5,7 @@ terraform {
required_providers { required_providers {
cloudflare = { cloudflare = {
source = "cloudflare/cloudflare" source = "cloudflare/cloudflare"
version = "4.52.0" version = "4.50.0"
} }
} }
} }

View File

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates. # Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" { provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.52.0" version = "4.50.0"
constraints = "4.52.0" constraints = "4.50.0"
hashes = [ hashes = [
"h1:2BEJyXJtYC4B4nda/WCYUmuJYDaYk88F8t1pwPzr0iQ=", "h1:0qvD5ZKn2tMZ8cOjQrUSITIC9tKCZbrSaSswV9lOyiU=",
"h1:4IASk5SESeWKQ7JU0+M7KApuF5mZyklvwMXPBabim3c=", "h1:4N0gplrZ0zOsJv3Kx1VfIx2FwrZHbYU0Un2yfiLZIGQ=",
"h1:5ImZxxALSnWfH/4EXw/wFirSmk5Tr0ACmcysy51AafE=", "h1:81AMQq4kNKU/35U8ElQegUxG4E6xB0erIjG5xVmjIyo=",
"h1:6TJ3dxLSin4ZKBJLsZDn95H2ZYnGm8S7GGHvvXuuMQU=", "h1:EEQNADUmV3IL6x00yzy04i7OCSLeOMgM9XQkV3w71gA=",
"h1:IzTUjg9kQ4N3qizP9CjYLeHwjsuGgtxwXvfUQWyOLcA=", "h1:HD0KI7td6oiSSAnJNn8UPSGf+hKiTo4JVQYfAiU1SqM=",
"h1:NTaOQfYINA0YTG/V1/9+SYtgX1it63+cBugj4WK4FWc=", "h1:Hl+o5LtcvZg2f3l1hh9vaG/DFK6k+dTIZSeM0lXyfpo=",
"h1:PXH48LuJn329sCfMXprdMDk51EZaWFyajVvS03qhQLs=", "h1:ZUO2oIJ6jtZdvl816h0cEIiIeZ/fFCF64+abGEVxZZM=",
"h1:Pi5M+GeoMSN2eJ6QnIeXjBf19O+rby/74CfB2ocpv20=", "h1:Zio80fnEeUKdlSOhTVskMEFSLUQ6TMsMKnXc+Dy2P2A=",
"h1:ShXZ2ZjBvm3thfoPPzPT8+OhyismnydQVkUAfI8X12w=", "h1:aLLvg36evTyqjtXGV2MjAV8imktXFmry7p/xCu9GQC4=",
"h1:WQ9hu0Wge2msBbODfottCSKgu8oKUrw4Opz+fDPVVHk=", "h1:azL05eWyy2V8SWkbZZImPWvv8ynG4eqmrbZhjXBDFug=",
"h1:Z5yXML2DE0uH9UU+M0ut9JMQAORcwVZz1CxBHzeBmao=", "h1:ckMysHY4fJmr7o58XMi+DdgOTB/U/Mf1u1JA9ly3g/I=",
"h1:jqI2qKknpleS3JDSplyGYHMu0u9K/tor1ZOjFwDgEMk=", "h1:jxOwjDNjt5WCb4YjjiMsman91O8Y+MAPz6UwJ4a6F+0=",
"h1:kgfutDh14Q5nw4eg6qGFamFxIiY8Ae0FPKRBLDOzpcI=", "h1:u4OfnjSLa4Wk1IUFAzrvMnGgr8MvRHEWVDHEScPK2E8=",
"h1:zCAO7GZmfYhWb+i6TfqlqhMeDyPZWGio2IzEzAh3YTs=", "h1:wQkR1oeSkzlHn3rnVuLJRJLBHlg4EHt7Y64DeTjfkjQ=",
"zh:19be1a91c982b902c42aba47766860dfa5dc151eed1e95fd39ca642229381ef0", "zh:0ef99ed39472a94e6a0d6fa733cf0a46bce3bf66eba2873efae8846efdddc237",
"zh:1de451c4d1ecf7efbe67b6dace3426ba810711afdd644b0f1b870364c8ae91f8", "zh:2929cbbffcead171d45c88e4a7a59e9c013ea775dafa68b10da8db7cd04b6140",
"zh:352b4a2120173298622e669258744554339d959ac3a95607b117a48ee4a83238", "zh:462601c87118088e1a718842e367af7d8e7620598d426980a6d6b33de759865e",
"zh:3c6f1346d9154afbd2d558fabb4b0150fc8d559aa961254144fe1bc17fe6032f", "zh:56766eb62a74a9d88d9efb8486dd3a0c5c9db873d0a980ae9ef1e8af27d74231",
"zh:4c4c92d53fb535b1e0eff26f222bbd627b97d3b4c891ec9c321268676d06152f", "zh:6b4e8810d99498a5a20a5872982a0f1354e79cfc4a7dfe7cc656f1c7eaae47d8",
"zh:53276f68006c9ceb7cdb10a6ccf91a5c1eadd1407a28edb5741e84e88d7e29e8", "zh:6d65bdb4ec94b6eecc8abe26d94e2ca09262dc1e7a9934db829f418be0119920",
"zh:7925a97773948171a63d4f65bb81ee92fd6d07a447e36012977313293a5435c9", "zh:71adeaf31e41a358ec6095004062e43f56ee7d4b2504e5613ab351d511695641",
"zh:7dfb0a4496cfe032437386d0a2cd9229a1956e9c30bd920923c141b0f0440060",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f", "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8d4aa79f0a414bb4163d771063c70cd991c8fac6c766e685bac2ee12903c5bd6", "zh:89761c15908ccc2cf9c50bb5cb3be45d3ad0c45fc7c608c6b95f48c0288b7160",
"zh:a67540c13565616a7e7e51ee9366e88b0dc60046e1d75c72680e150bd02725bb", "zh:8cc5d7c5939da89cfd01f3e51c84f3576564783acea9db86bd9e32049805ed96",
"zh:a936383a4767f5393f38f622e92bf2d0c03fe04b69c284951f27345766c7b31b", "zh:987cff8225b1dd436cdcb4fc6228689ae7e4281de6896412a2a9a3325c49f05e",
"zh:d4887d73c466ff036eecf50ad6404ba38fd82ea4855296b1846d244b0f13c380", "zh:991e83ebb89867d71e01a1c215ed159efb425683b0a44707be8579eb0a337f06",
"zh:e9093c8bd5b6cd99c81666e315197791781b8f93afa14fc2e0f732d1bb2a44b7", "zh:ab8177ae2d8f5cfa90043a6f867435012cae115f6061b832a7e2462e0ae87a67",
"zh:efd3b3f1ec59a37f635aa1d4efcf178734c2fcf8ddb0d56ea690bec342da8672", "zh:d1ca34df1398f201274a6a18102975148c10ca15aa43cfc56cc9897620929509",
"zh:d34946f70201baf6dda03e3b294c6bbe40d95d0278e97b9f636ded94822b24ac",
] ]
} }

View File

@@ -5,7 +5,7 @@ terraform {
required_providers { required_providers {
cloudflare = { cloudflare = {
source = "cloudflare/cloudflare" source = "cloudflare/cloudflare"
version = "4.52.0" version = "4.50.0"
} }
} }
} }

View File

@@ -1,13 +1,4 @@
# # See:
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
# For development see:
# - https://immich.app/docs/developer/setup # - https://immich.app/docs/developer/setup
# - https://immich.app/docs/developer/troubleshooting # - https://immich.app/docs/developer/troubleshooting
@@ -25,7 +16,7 @@ services:
context: ../ context: ../
dockerfile: server/Dockerfile dockerfile: server/Dockerfile
target: dev target: dev
restart: unless-stopped restart: always
volumes: volumes:
- ../server:/usr/src/app - ../server:/usr/src/app
- ../open-api:/usr/src/open-api - ../open-api:/usr/src/open-api
@@ -48,7 +39,7 @@ services:
IMMICH_THIRD_PARTY_SOURCE_URL: https://github.com/immich-app/immich/ IMMICH_THIRD_PARTY_SOURCE_URL: https://github.com/immich-app/immich/
IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues
IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://immich.app/docs IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://immich.app/docs
IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/community-guides IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/third-party
ulimits: ulimits:
nofile: nofile:
soft: 1048576 soft: 1048576
@@ -95,12 +86,12 @@ services:
image: immich-machine-learning-dev:latest image: immich-machine-learning-dev:latest
# extends: # extends:
# file: hwaccel.ml.yml # file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference # service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
build: build:
context: ../machine-learning context: ../machine-learning
dockerfile: Dockerfile dockerfile: Dockerfile
args: args:
- DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
ports: ports:
- 3003:3003 - 3003:3003
volumes: volumes:
@@ -116,13 +107,13 @@ services:
redis: redis:
container_name: immich_redis container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177 image: redis:6.2-alpine@sha256:905c4ee67b8e0aa955331960d2aa745781e6bd89afc44a8584bfd13bc890f0ae
healthcheck: healthcheck:
test: redis-cli ping || exit 1 test: redis-cli ping || exit 1
database: database:
container_name: immich_postgres container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.2-pgvectors0.2.0 image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env_file: env_file:
- .env - .env
environment: environment:
@@ -134,6 +125,25 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports: ports:
- 5432:5432 - 5432:5432
healthcheck:
test: >-
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
echo "checksum failure count is $$Chksum";
[ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: >-
postgres
-c shared_preload_libraries=vectors.so
-c 'search_path="$$user", public, vectors'
-c logging_collector=on
-c max_wal_size=2GB
-c shared_buffers=512MB
-c wal_compression=on
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
# immich-prometheus: # immich-prometheus:
# container_name: immich_prometheus # container_name: immich_prometheus

View File

@@ -1,12 +1,3 @@
#
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
name: immich-prod name: immich-prod
services: services:
@@ -38,12 +29,12 @@ services:
image: immich-machine-learning:latest image: immich-machine-learning:latest
# extends: # extends:
# file: hwaccel.ml.yml # file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference # service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
build: build:
context: ../machine-learning context: ../machine-learning
dockerfile: Dockerfile dockerfile: Dockerfile
args: args:
- DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
ports: ports:
- 3003:3003 - 3003:3003
volumes: volumes:
@@ -56,14 +47,14 @@ services:
redis: redis:
container_name: immich_redis container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177 image: redis:6.2-alpine@sha256:905c4ee67b8e0aa955331960d2aa745781e6bd89afc44a8584bfd13bc890f0ae
healthcheck: healthcheck:
test: redis-cli ping || exit 1 test: redis-cli ping || exit 1
restart: always restart: always
database: database:
container_name: immich_postgres container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.2-pgvectors0.2.0 image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env_file: env_file:
- .env - .env
environment: environment:
@@ -75,6 +66,24 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports: ports:
- 5432:5432 - 5432:5432
healthcheck:
test: >-
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
echo "checksum failure count is $$Chksum";
[ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: >-
postgres
-c shared_preload_libraries=vectors.so
-c 'search_path="$$user", public, vectors'
-c logging_collector=on
-c max_wal_size=2GB
-c shared_buffers=512MB
-c wal_compression=on
restart: always restart: always
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
@@ -82,7 +91,7 @@ services:
container_name: immich_prometheus container_name: immich_prometheus
ports: ports:
- 9090:9090 - 9090:9090
image: prom/prometheus@sha256:9abc6cf6aea7710d163dbb28d8eeb7dc5baef01e38fa4cd146a406dd9f07f70d image: prom/prometheus@sha256:6559acbd5d770b15bb3c954629ce190ac3cbbdb2b7f1c30f0385c4e05104e218
volumes: volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml - ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus - prometheus-data:/prometheus
@@ -94,7 +103,7 @@ services:
command: ['./run.sh', '-disable-reporting'] command: ['./run.sh', '-disable-reporting']
ports: ports:
- 3000:3000 - 3000:3000
image: grafana/grafana:12.0.1-ubuntu@sha256:65575bb9c761335e2ff30e364f21d38632e3b2e75f5f81d83cc92f44b9bbc055 image: grafana/grafana:11.4.0-ubuntu@sha256:afccec22ba0e4815cca1d2bf3836e414322390dc78d77f1851976ffa8d61051c
volumes: volumes:
- grafana-data:/var/lib/grafana - grafana-data:/var/lib/grafana

View File

@@ -1,11 +1,10 @@
# #
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose # WARNING: Make sure to use the docker-compose.yml of the current release:
#
# Make sure to use the docker-compose.yml of the current release:
# #
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml # https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
# #
# The compose file on main may not be compatible with the latest release. # The compose file on main may not be compatible with the latest release.
#
name: immich name: immich
@@ -33,12 +32,12 @@ services:
immich-machine-learning: immich-machine-learning:
container_name: immich_machine_learning container_name: immich_machine_learning
# For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag. # For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
# Example tag: ${IMMICH_VERSION:-release}-cuda # Example tag: ${IMMICH_VERSION:-release}-cuda
image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release} image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
# extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration # extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration
# file: hwaccel.ml.yml # file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable # service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
volumes: volumes:
- model-cache:/cache - model-cache:/cache
env_file: env_file:
@@ -49,24 +48,40 @@ services:
redis: redis:
container_name: immich_redis container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177 image: docker.io/redis:6.2-alpine@sha256:905c4ee67b8e0aa955331960d2aa745781e6bd89afc44a8584bfd13bc890f0ae
healthcheck: healthcheck:
test: redis-cli ping || exit 1 test: redis-cli ping || exit 1
restart: always restart: always
database: database:
container_name: immich_postgres container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.2-pgvectors0.2.0 image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
environment: environment:
POSTGRES_PASSWORD: ${DB_PASSWORD} POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME} POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME} POSTGRES_DB: ${DB_DATABASE_NAME}
POSTGRES_INITDB_ARGS: '--data-checksums' POSTGRES_INITDB_ARGS: '--data-checksums'
# Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
# DB_STORAGE_TYPE: 'HDD'
volumes: volumes:
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file # Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data - ${DB_DATA_LOCATION}:/var/lib/postgresql/data
healthcheck:
test: >-
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
echo "checksum failure count is $$Chksum";
[ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: >-
postgres
-c shared_preload_libraries=vectors.so
-c 'search_path="$$user", public, vectors'
-c logging_collector=on
-c max_wal_size=2GB
-c shared_buffers=512MB
-c wal_compression=on
restart: always restart: always
volumes: volumes:

View File

@@ -2,8 +2,7 @@
# The location where your uploaded files are stored # The location where your uploaded files are stored
UPLOAD_LOCATION=./library UPLOAD_LOCATION=./library
# The location where your database files are stored
# The location where your database files are stored. Network shares are not supported for the database
DB_DATA_LOCATION=./postgres DB_DATA_LOCATION=./postgres
# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List # To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List

View File

@@ -13,13 +13,6 @@ services:
volumes: volumes:
- /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin:ro # Mali firmware for your chipset (not always required depending on the driver) - /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin:ro # Mali firmware for your chipset (not always required depending on the driver)
- /usr/lib/libmali.so:/usr/lib/libmali.so:ro # Mali driver for your chipset (always required) - /usr/lib/libmali.so:/usr/lib/libmali.so:ro # Mali driver for your chipset (always required)
rknn:
security_opt:
- systempaths=unconfined
- apparmor=unconfined
devices:
- /dev/dri:/dev/dri
cpu: {} cpu: {}
@@ -33,13 +26,6 @@ services:
capabilities: capabilities:
- gpu - gpu
rocm:
group_add:
- video
devices:
- /dev/dri:/dev/dri
- /dev/kfd:/dev/kfd
openvino: openvino:
device_cgroup_rules: device_cgroup_rules:
- 'c 189:* rmw' - 'c 189:* rmw'

View File

@@ -48,7 +48,6 @@ services:
vaapi-wsl: # use this for VAAPI if you're running Immich in WSL2 vaapi-wsl: # use this for VAAPI if you're running Immich in WSL2
devices: devices:
- /dev/dri:/dev/dri - /dev/dri:/dev/dri
- /dev/dxg:/dev/dxg
volumes: volumes:
- /usr/lib/wsl:/usr/lib/wsl - /usr/lib/wsl:/usr/lib/wsl
environment: environment:

View File

@@ -1 +1 @@
22.16.0 22.13.1

View File

@@ -97,7 +97,7 @@ Make sure to [set your reverse proxy](/docs/administration/reverse-proxy/) to al
Also, check the disk space of your reverse proxy. Also, check the disk space of your reverse proxy.
In some cases, proxies cache requests to disk before passing them on, and if disk space runs out, the request fails. In some cases, proxies cache requests to disk before passing them on, and if disk space runs out, the request fails.
If you are using Cloudflare Tunnel, please know that they set a maximum filesize of 100 MB that cannot be changed. If you are using Cloudflare Tunnel, please know that they set a maxiumum filesize of 100 MB that cannot be changed.
At times, files larger than this may work, potentially up to 1 GB. However, the official limit is 100 MB. At times, files larger than this may work, potentially up to 1 GB. However, the official limit is 100 MB.
If you are having issues, we recommend switching to a different network deployment. If you are having issues, we recommend switching to a different network deployment.
@@ -117,7 +117,7 @@ See [Backup and Restore](/docs/administration/backup-and-restore.md).
### Does Immich support reading existing face tag metadata? ### Does Immich support reading existing face tag metadata?
Yes, it creates new faces and persons from the imported asset metadata. For details see the [feature request #4348](https://github.com/immich-app/immich/discussions/4348) and [PR #6455](https://github.com/immich-app/immich/pull/6455). No, it currently does not. There is an [open feature request on GitHub](https://github.com/immich-app/immich/discussions/4348).
### Does Immich support the filtering of NSFW images? ### Does Immich support the filtering of NSFW images?
@@ -170,7 +170,7 @@ If you aren't able to or prefer not to mount Samba on the host (such as Windows
Below is an example in the `docker-compose.yml`. Below is an example in the `docker-compose.yml`.
Change your username, password, local IP, and share name, and see below where the line `- originals:/usr/src/app/originals`, Change your username, password, local IP, and share name, and see below where the line `- originals:/usr/src/app/originals`,
correlates to the section where the volume `originals` was created. You can call this whatever you like, and map it to the docker container as you like. corrolates to the section where the volume `originals` was created. You can call this whatever you like, and map it to the docker container as you like.
For example you could change `originals:` to `Photos:`, and change `- originals:/usr/src/app/originals` to `Photos:/usr/src/app/photos`. For example you could change `originals:` to `Photos:`, and change `- originals:/usr/src/app/originals` to `Photos:/usr/src/app/photos`.
```diff ```diff
@@ -262,7 +262,7 @@ No, this is not supported. Only models listed in the [Hugging Face][huggingface]
### I want to be able to search in other languages besides English. How can I do that? ### I want to be able to search in other languages besides English. How can I do that?
You can change to a multilingual CLIP model. See [here](/docs/features/searching#clip-models) for instructions. You can change to a multilingual CLIP model. See [here](/docs/features/searching#clip-model) for instructions.
### Does Immich support Facial Recognition for videos? ### Does Immich support Facial Recognition for videos?

View File

@@ -23,32 +23,16 @@ Refer to the official [postgres documentation](https://www.postgresql.org/docs/c
It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored. It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
::: :::
### Automatic Database Dumps ### Automatic Database Backups
:::warning For convenience, Immich will automatically create database backups by default. The backups are stored in `UPLOAD_LOCATION/backups`.
The automatic database dumps can be used to restore the database in the event of damage to the Postgres database files. As mentioned above, you should make your own backup of these together with the asset folders as noted below.
There is no monitoring for these dumps and you will not be notified if they are unsuccessful. You can adjust the schedule and amount of kept backups in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
::: By default, Immich will keep the last 14 backups and create a new backup every day at 2:00 AM.
:::caution
The database dumps do **NOT** contain any pictures or videos, only metadata. They are only usable with a copy of the other files in `UPLOAD_LOCATION` as outlined below.
:::
For disaster-recovery purposes, Immich will automatically create database dumps. The dumps are stored in `UPLOAD_LOCATION/backups`.
Please be sure to make your own, independent backup of the database together with the asset folders as noted below.
You can adjust the schedule and amount of kept database dumps in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
By default, Immich will keep the last 14 database dumps and create a new dump every day at 2:00 AM.
#### Trigger Dump
You can trigger a database dump from the [admin job status page](http://my.immich.app/admin/jobs-status).
Visit the page, open the "Create job" modal from the top right, select "Create Database Dump", and click "Confirm".
A job will run and trigger a dump; you can verify it worked correctly by checking the logs or the `backups/` folder.
These dumps count towards the last `X` dumps that are kept based on your settings.
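For example, a quick way to confirm a dump landed, assuming the default backup location:

```bash
ls -lh "${UPLOAD_LOCATION}/backups"
```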
#### Restoring #### Restoring
We hope to make restoring simpler in future versions; for now, you can find the database dumps in the `UPLOAD_LOCATION/backups` folder on your host. We hope to make restoring simpler in future versions, for now you can find the backups in the `UPLOAD_LOCATION/backups` folder on your host.
Then please follow the steps in the following section for restoring the database. Then please follow the steps in the following section for restoring the database.
### Manual Backup and Restore ### Manual Backup and Restore
@@ -69,7 +53,7 @@ docker compose create # Create Docker containers for Immich apps witho
docker start immich_postgres # Start Postgres server docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up sleep 10 # Wait for Postgres server to start up
# Check the database user if you deviated from the default # Check the database user if you deviated from the default
gunzip --stdout "/path/to/backup/dump.sql.gz" \ gunzip < "/path/to/backup/dump.sql.gz" \
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \ | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
| docker exec -i immich_postgres psql --dbname=postgres --username=<DB_USERNAME> # Restore Backup | docker exec -i immich_postgres psql --dbname=postgres --username=<DB_USERNAME> # Restore Backup
docker compose up -d # Start remainder of Immich apps docker compose up -d # Start remainder of Immich apps
@@ -92,8 +76,10 @@ docker compose create # Create Docker containers for
docker start immich_postgres # Start Postgres server docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up sleep 10 # Wait for Postgres server to start up
docker exec -it immich_postgres bash # Enter the Docker shell and run the following command docker exec -it immich_postgres bash # Enter the Docker shell and run the following command
# Check the database user if you deviated from the default. If your backup ends in `.gz`, replace `cat` with `gunzip --stdout` # Check the database user if you deviated from the default. If your backup ends in `.gz`, replace `cat` with `gunzip`
cat "/dump.sql" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | psql --dbname=postgres --username=<DB_USERNAME> cat < "/dump.sql" \
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
| psql --dbname=postgres --username=<DB_USERNAME> # Restore Backup
exit # Exit the Docker shell exit # Exit the Docker shell
docker compose up -d # Start remainder of Immich apps docker compose up -d # Start remainder of Immich apps
``` ```
@@ -219,10 +205,3 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
Do not touch the files inside these folders under any circumstances except taking a backup. Changing or removing an asset can cause untracked and missing files. Do not touch the files inside these folders under any circumstances except taking a backup. Changing or removing an asset can cause untracked and missing files.
You can think of it as the App-Which-Must-Not-Be-Named: the only access to viewing, changing, and deleting assets is through the mobile or browser interface.
::: :::
## Backup ordering
A backup of Immich should contain both the database and the asset files. When backing these up, it's possible for them to get out of sync, potentially resulting in broken assets after you restore.
The best way of dealing with this is to stop the immich-server container while you take a backup. If nothing is changing, the backup will always be in sync.
If stopping the container is not an option, then the recommended order is to back up the database first and the filesystem second. This way, the worst-case scenario is that there are files on the filesystem that the database doesn't know about. If necessary, these can be (re)uploaded manually after a restore. If the backup is done the other way around, with the filesystem first and the database second, the restored database may reference files that aren't in the filesystem backup, resulting in broken assets.
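A minimal sketch of that ordering, assuming the default container names from the compose files and a placeholder backup destination (adapt both to your setup):

```bash
# 1. Optionally stop the server so nothing changes mid-backup.
docker stop immich_server

# 2. Database first.
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> \
  | gzip > /path/to/backup/dump.sql.gz

# 3. Filesystem second.
rsync -a "${UPLOAD_LOCATION}/" /path/to/backup/library/

# 4. Bring the server back up.
docker start immich_server
```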

Binary file not shown.


View File

@@ -93,7 +93,6 @@ The `.well-known/openid-configuration` part of the url is optional and will be a
## Auto Launch ## Auto Launch
When Auto Launch is enabled, the login page will automatically redirect the user to the OAuth authorization url, to login with OAuth. To access the login screen again, use the browser's back button, or navigate directly to `/auth/login?autoLaunch=0`. When Auto Launch is enabled, the login page will automatically redirect the user to the OAuth authorization url, to login with OAuth. To access the login screen again, use the browser's back button, or navigate directly to `/auth/login?autoLaunch=0`.
Auto Launch can also be enabled on a per-request basis by navigating to `/auth/login?autoLaunch=1`. This can be useful when Immich is called from, for example, Nextcloud using the _External sites_ and _oidc_ apps, so that users can interact directly with a logged-in instance of Immich.
## Mobile Redirect URI ## Mobile Redirect URI

View File

@@ -10,16 +10,12 @@ Running with a pre-existing Postgres server can unlock powerful administrative f
## Prerequisites ## Prerequisites
You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`. You must install pgvecto.rs into your instance of Postgres using their [instructions][vectors-install]. After installation, add `shared_preload_libraries = 'vectors.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vectors.so'`.
The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).
You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.
:::note :::note
Immich is known to work with Postgres versions `>= 14, < 18`. Immich is known to work with Postgres versions 14, 15, and 16. Earlier versions are unsupported. Postgres 17 is nominally compatible, but pgvecto.rs does not have prebuilt images or packages for it as of writing.
Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.5.0`. Make sure the installed version of pgvecto.rs is compatible with your version of Immich. The current accepted range for pgvecto.rs is `>= 0.2.0, < 0.4.0`.
::: :::
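A condensed sketch of these prerequisite steps on Debian/Ubuntu, assuming Postgres 16 and the default config path; package names and paths vary by distribution and Postgres version:

```bash
# Install pgvector from the PostgreSQL Apt repository (added beforehand).
sudo apt install postgresql-16-pgvector

# Install VectorChord per its installation guide, then preload it.
# If shared_preload_libraries is already set, append vchord.so with a comma instead.
echo "shared_preload_libraries = 'vchord.so'" \
  | sudo tee -a /etc/postgresql/16/main/postgresql.conf
sudo systemctl restart postgresql
```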
## Specifying the connection URL ## Specifying the connection URL
@@ -57,99 +53,21 @@ CREATE DATABASE <immichdatabasename>;
\c <immichdatabasename> \c <immichdatabasename>
BEGIN; BEGIN;
ALTER DATABASE <immichdatabasename> OWNER TO <immichdbusername>; ALTER DATABASE <immichdatabasename> OWNER TO <immichdbusername>;
CREATE EXTENSION vchord CASCADE; CREATE EXTENSION vectors;
CREATE EXTENSION earthdistance CASCADE; CREATE EXTENSION earthdistance CASCADE;
ALTER DATABASE <immichdatabasename> SET search_path TO "$user", public, vectors;
ALTER SCHEMA vectors OWNER TO <immichdbusername>;
COMMIT; COMMIT;
``` ```
### Updating VectorChord ### Updating pgvecto.rs
When installing a new version of VectorChord, you will need to manually update the extension and reindex by connecting to the Immich database and running: When installing a new version of pgvecto.rs, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vectors UPDATE;`.
``` ### Common errors
ALTER EXTENSION vchord UPDATE;
REINDEX INDEX face_index;
REINDEX INDEX clip_index;
```
## Migrating to VectorChord #### Permission denied for view
VectorChord is the successor extension to pgvecto.rs, allowing for higher performance, lower memory usage and higher quality results for smart search and facial recognition. If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO <immichdbusername>;`.
### Migrating from pgvecto.rs [vectors-install]: https://docs.pgvecto.rs/getting-started/installation.html
Support for pgvecto.rs will be dropped in a later release, so we recommend that all users currently on pgvecto.rs migrate to VectorChord at their convenience. There are two primary approaches to do so.
The easiest option is to have both extensions installed during the migration:
<details>
<summary>Migration steps (automatic)</summary>
1. Ensure you still have pgvecto.rs installed
2. Install `pgvector` (`>= 0.7.0, < 1.0.0`). The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`)
3. [Install VectorChord][vchord-install]
4. Add `shared_preload_libraries = 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed
5. Restart the Postgres database
6. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client
7. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output
8. If Immich does not have superuser permissions, run the SQL command `DROP EXTENSION vectors;`
9. Drop the old schema by running `DROP SCHEMA vectors;`
10. Remove the `vectors.so` entry from the `shared_preload_libraries` setting
11. Restart the Postgres database
12. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate). `pgvector` must remain installed as it provides the data types used by `vchord`
</details>
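For reference, the SQL from steps 6, 8 and 9 can also be run from the shell. A minimal sketch, assuming a database named `immich` and a superuser named `postgres`:

```bash
# Step 6: create the new extension before starting Immich
psql -U postgres -d immich -c "CREATE EXTENSION vchord CASCADE;"

# Steps 8 and 9: after Immich logs "Reindexed face_index" and
# "Reindexed clip_index", remove the old extension and schema
psql -U postgres -d immich -c "DROP EXTENSION vectors;"
psql -U postgres -d immich -c "DROP SCHEMA vectors;"
```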
If it is not possible to have both VectorChord and pgvecto.rs installed at the same time, you can perform the migration with more manual steps:
<details>
<summary>Migration steps (manual)</summary>
1. While pgvecto.rs is still installed, run the following SQL command using psql or your choice of database client. Take note of the number outputted by this command as you will need it later
```sql
SELECT atttypmod as dimsize
FROM pg_attribute f
JOIN pg_class c ON c.oid = f.attrelid
WHERE c.relkind = 'r'::char
AND f.attnum > 0
AND c.relname = 'smart_search'::text
AND f.attname = 'embedding'::text;
```
2. Remove references to pgvecto.rs using the below SQL commands
```sql
DROP INDEX IF EXISTS clip_index;
DROP INDEX IF EXISTS face_index;
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE real[];
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE real[];
```
3. [Install VectorChord][vchord-install]
4. Change the columns back to the appropriate vector types, replacing `<number>` with the number from step 1
```sql
CREATE EXTENSION IF NOT EXISTS vchord CASCADE;
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE vector(<number>);
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512);
```
5. Start Immich and let it create new indices using VectorChord
</details>
### Migrating from pgvector
<details>
<summary>Migration steps</summary>
1. Ensure you have at least version 0.7.0 of pgvector installed. If it is older, upgrade it and run the SQL command `ALTER EXTENSION vector UPDATE;` using psql or your choice of database client
2. Follow the Prerequisites to install VectorChord
3. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;`
4. Remove the `DB_VECTOR_EXTENSION=pgvector` environmental variable as it will make Immich still use pgvector if set
5. Start Immich and let it create new indices using VectorChord
</details>
Note that VectorChord itself uses pgvector types, so you should not uninstall pgvector after following these steps.
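A quick sanity check after either migration, run with psql or your choice of database client: both extensions should be present, since `vector` (pgvector) provides the data types used by `vchord`.

```sql
SELECT extname, extversion
FROM pg_extension
WHERE extname IN ('vector', 'vchord');
```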
[vchord-install]: https://docs.vectorchord.ai/vectorchord/getting-started/installation.html
[pg-apt]: https://www.postgresql.org/download/linux/#generic

View File

@@ -22,7 +22,7 @@ server {
client_max_body_size 50000M; client_max_body_size 50000M;
# Set headers # Set headers
proxy_set_header Host $host; proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Proto $scheme;

View File

@@ -11,7 +11,6 @@ The `immich-server` docker image comes preinstalled with an administrative CLI (
| `enable-oauth-login` | Enable OAuth login | | `enable-oauth-login` | Enable OAuth login |
| `disable-oauth-login` | Disable OAuth login | | `disable-oauth-login` | Disable OAuth login |
| `list-users` | List Immich users | | `list-users` | List Immich users |
| `version` | Print Immich version |
## How to run a command ## How to run a command
@@ -81,10 +80,3 @@ immich-admin list-users
} }
] ]
``` ```
Print Immich Version
```
immich-admin version
v1.129.0
```

View File

@@ -98,14 +98,6 @@ The default Immich log level is `Log` (commonly known as `Info`). The Immich adm
Through this setting, you can manage all the settings related to machine learning in Immich, from the setting of remote machine learning to the model and its parameters Through this setting, you can manage all the settings related to machine learning in Immich, from the setting of remote machine learning to the model and its parameters
You can choose to disable a certain type of machine learning, for example smart search or facial recognition. You can choose to disable a certain type of machine learning, for example smart search or facial recognition.
### URL
The built-in machine learning server (`http://immich-machine-learning:3003`) is configured by default, but you can change this or add additional servers.
Hosting the `immich-machine-learning` container on a machine with a more powerful GPU can be helpful for processing a large number of photos (such as during batch import) or for faster search.
If more than one URL is provided, each server will be attempted one-at-a-time until one responds successfully, in order from first to last. Servers that don't respond will be temporarily ignored until they come back online.
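For example, a hypothetical configuration that prefers a remote GPU machine and falls back to the built-in container could list:

```
http://192.168.1.50:3003
http://immich-machine-learning:3003
```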
### Smart Search ### Smart Search
The [smart search](/docs/features/searching) settings allow you to change the [CLIP model](https://openai.com/research/clip). Larger models will typically provide [more accurate search results](https://github.com/immich-app/immich/discussions/11862) but consume more processing power and RAM. When [changing the CLIP model](/docs/FAQ#can-i-use-a-custom-clip-model) it is mandatory to re-run the Smart Search job on all images to fully apply the change. The [smart search](/docs/features/searching) settings allow you to change the [CLIP model](https://openai.com/research/clip). Larger models will typically provide [more accurate search results](https://github.com/immich-app/immich/discussions/11862) but consume more processing power and RAM. When [changing the CLIP model](/docs/FAQ#can-i-use-a-custom-clip-model) it is mandatory to re-run the Smart Search job on all images to fully apply the change.

View File

@@ -31,7 +31,7 @@ Admin can send a welcome email if the Email option is set, you can learn here ho
Admin can specify the storage quota for the user as the instance's admin; once the limit is reached, the user won't be able to upload to the instance anymore. Admin can specify the storage quota for the user as the instance's admin; once the limit is reached, the user won't be able to upload to the instance anymore.
In order to select a storage quota, click on the pencil icon and enter the storage quota in GiB. You can choose an unlimited quota by leaving it empty (default). In order to select a storage quota, click on the pencil icon and enter the storage quota in GiB. You can choose an unlimited quota using the value 0 (default).
:::tip :::tip
The system administrator can see the usage quota percentage of all users in Server Stats page. The system administrator can see the usage quota percentage of all users in Server Stats page.

View File

@@ -50,18 +50,19 @@ The Immich CLI is an [npm](https://www.npmjs.com/) package that lets users contr
The Immich backend is divided into several services, which are run as individual docker containers. The Immich backend is divided into several services, which are run as individual docker containers.
1. `immich-server` - Handle and respond to REST API requests, execute background jobs (thumbnail generation, metadata extraction, transcoding, etc.) 1. `immich-server` - Handle and respond to REST API requests
1. `immich-microservices` - Execute background jobs (thumbnail generation, metadata extraction, transcoding, etc.)
1. `immich-machine-learning` - Execute machine learning models 1. `immich-machine-learning` - Execute machine learning models
1. `postgres` - Persistent data storage 1. `postgres` - Persistent data storage
1. `redis`- Queue management for background jobs 1. `redis`- Queue management for `immich-microservices`
### Immich Server ### Immich Server
The Immich Server is a [TypeScript](https://www.typescriptlang.org/) project written for [Node.js](https://nodejs.org/). It uses the [Nest.js](https://nestjs.com) framework, [Express](https://expressjs.com/) server, and the query builder [Kysely](https://kysely.dev/). The server codebase also loosely follows the [Hexagonal Architecture](<https://en.wikipedia.org/wiki/Hexagonal_architecture_(software)>). Specifically, we aim to separate technology specific implementations (`src/repositories`) from core business logic (`src/services`). The Immich Server is a [TypeScript](https://www.typescriptlang.org/) project written for [Node.js](https://nodejs.org/). It uses the [Nest.js](https://nestjs.com) framework, with [TypeORM](https://typeorm.io/) for database management. The server codebase also loosely follows the [Hexagonal Architecture](<https://en.wikipedia.org/wiki/Hexagonal_architecture_(software)>). Specifically, we aim to separate technology specific implementations (`infra/`) from core business logic (`domain/`).
### API Endpoints #### REST Endpoints
An incoming HTTP request is mapped to a controller (`src/controllers`). Controllers are collections of HTTP endpoints. Each controller usually implements the following CRUD operations for its respective resource type: The server is a list of HTTP endpoints and associated handlers (controllers). Each controller usually implements the following CRUD operations:
- `POST` `/<type>` - **Create** - `POST` `/<type>` - **Create**
- `GET` `/<type>` - **Read** (all) - `GET` `/<type>` - **Read** (all)
@@ -69,13 +70,13 @@ An incoming HTTP request is mapped to a controller (`src/controllers`). Controll
- `PUT` `/<type>/:id` - **Update** (by id) - `PUT` `/<type>/:id` - **Update** (by id)
- `DELETE` `/<type>/:id` - **Delete** (by id) - `DELETE` `/<type>/:id` - **Delete** (by id)
### Domain Transfer Objects (DTOs) #### DTOs
The server uses [Domain Transfer Objects](https://en.wikipedia.org/wiki/Data_transfer_object) as public interfaces for the inputs (query, params, and body) and outputs (response) for each endpoint. DTOs translate to [OpenAPI](./open-api.md) schemas and control the generated code used by each client. The server uses [Domain Transfer Objects](https://en.wikipedia.org/wiki/Data_transfer_object) as public interfaces for the inputs (query, params, and body) and outputs (response) for each endpoint. DTOs translate to [OpenAPI](./open-api.md) schemas and control the generated code used by each client.
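As an illustration only (the resource and class names below are hypothetical, not actual Immich code), a controller and DTO in this style might look like:

```typescript
import { Body, Controller, Get, Param, Post } from '@nestjs/common';
import { IsNotEmpty, IsString } from 'class-validator';

// DTO: the public shape of the request body, validated at the edge
// and reflected into the generated OpenAPI schema.
class ExampleCreateDto {
  @IsString()
  @IsNotEmpty()
  name!: string;
}

// Controller: a collection of HTTP endpoints for one resource type.
@Controller('examples')
export class ExampleController {
  @Post()
  create(@Body() dto: ExampleCreateDto) {
    return { id: '1', name: dto.name };
  }

  @Get(':id')
  findOne(@Param('id') id: string) {
    return { id };
  }
}
```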
### Background Jobs ### Microservices
Immich uses a [worker](https://github.com/immich-app/immich/blob/main/server/src/utils/misc.ts#L266) to run background jobs. These jobs include: The Immich Microservices image uses the same `Dockerfile` as the Immich Server, but with a different entrypoint. The Immich Microservices service mainly handles executing jobs, which include the following:
- Thumbnail Generation - Thumbnail Generation
- Metadata Extraction - Metadata Extraction

View File

@@ -1,14 +1,14 @@
# Database Migrations # Database Migrations
After making any changes in the `server/src/schema`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration. After making any changes in the `server/src/entities`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.
1. Run the command 1. Run the command
```bash ```bash
npm run migrations:generate <migration-name> npm run typeorm:migrations:generate <migration-name>
``` ```
2. Check if the migration file makes sense. 2. Check if the migration file makes sense.
3. Move the migration file to folder `./server/src/schema/migrations` in your code editor. 3. Move the migration file to folder `./server/src/migrations` in your code editor.
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately. The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately.
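For example, with a hypothetical migration name:

```bash
cd server
npm run migrations:generate AddExampleTable
# review the generated file, then move it into ./src/schema/migrations
```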

View File

@@ -1,481 +0,0 @@
---
title: Devcontainers
sidebar_position: 3
---
# Development with Dev Containers
Dev Containers provide a consistent, reproducible development environment using Docker containers. With a single click, you can get started with an Immich development environment on Mac, Linux, Windows, or in the cloud using GitHub Codespaces.
[![Open in VSCode Containers](https://img.shields.io/static/v1?label=VSCode%20DevContainer&message=Immich&color=blue)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/immich-app/immich/)
[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/immich-app/immich/)
[Learn more about Dev Containers](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/adding-a-dev-container-configuration/introduction-to-dev-containers)
## Prerequisites
Before getting started, ensure you have:
- **Docker Desktop** (latest version)
- [Mac](https://docs.docker.com/desktop/install/mac-install/)
- [Windows](https://docs.docker.com/desktop/install/windows-install/) (with WSL2 backend recommended)
- [Linux](https://docs.docker.com/desktop/install/linux-install/)
- **Visual Studio Code** with the [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)
- **Git** for cloning the repository
- At least **8GB of RAM** (16GB recommended)
- **20GB of free disk space**
:::tip Alternative Development Environments
While this guide focuses on VS Code, you have many options for Dev Container development:
**Local Editors:**
- [IntelliJ IDEA](https://www.jetbrains.com/help/idea/connect-to-devcontainer.html) - Full JetBrains IDE support
- [neovim](https://github.com/jamestthompson3/nvim-remote-containers) - Lightweight terminal-based editor
- [Emacs](https://github.com/emacs-lsp/lsp-docker) - Extensible text editor
- [DevContainer CLI](https://github.com/devcontainers/cli) - Command-line interface
**Cloud-Based Solutions:**
- [GitHub Codespaces](https://github.com/features/codespaces) - Fully integrated with GitHub, excellent devcontainer.json support
- [GitPod](https://www.gitpod.io) - SaaS platform with recent Dev Container support (historically used gitpod.yml)
**Self-Hostable Options:**
- [Coder](https://coder.com) - Enterprise-focused, requires Terraform knowledge, self-managed
- [DevPod](https://devpod.sh) - Client-only tool with excellent devcontainer.json support, works with any provider (local, cloud, or on-premise)
:::
## Dev Container Services
The Dev Container environment consists of the following services:
| Service | Container Name | Description | Ports |
| ---------------- | ------------------------- | --------------------------------------------------------- | ----------------------------------------------------------------------- |
| Server & Web | `immich-server` | Runs both API server and web frontend in development mode | 2283 (API)<br/>3000 (Web)<br/>9230 (Workers Debug)<br/>9231 (API Debug) |
| Database | `database` | PostgreSQL database | 5432 |
| Cache | `redis` | Valkey cache server | 6379 |
| Machine Learning | `immich-machine-learning` | Immich ML model inference server | 3003 |
## Getting Started
### Step 1: Clone the Repository
```bash
git clone https://github.com/immich-app/immich.git
cd immich
```
### Step 2: Configure Environment Variables
The Immich Dev Containers read environment variables from your shell environment, not from `.env` files. This allows them to work in cloud environments without pre-configuration.
:::important Required Configuration
When running locally, and if you want to create (or use an existing) DB and/or photo storage folder, you must set the `UPLOAD_LOCATION` variable in your shell environment before launching the Dev Container. This determines where uploaded files are stored and also where the DB stores its data.
```bash
# Set temporarily for current session
export UPLOAD_LOCATION=/opt/dev_upload_folder
# Or add to your shell profile for persistence
# (~/.bashrc, ~/.zshrc, ~/.bash_profile, etc.)
echo 'export UPLOAD_LOCATION=/opt/dev_upload_folder' >> ~/.bashrc
source ~/.bashrc
```
:::
### Step 3: Launch the Dev Container
#### Using VS Code UI:
1. Open the cloned repository in VS Code
2. Press `F1` or `Ctrl/Cmd+Shift+P` to open the command palette
3. Type and select "Dev Containers: Rebuild and Reopen in Container"
4. Select "Immich - Backend, Frontend and ML" from the list
5. Wait for the container to build and start (this may take several minutes on first run)
#### Using VS Code Quick Actions:
1. Open the repository in VS Code
2. You should see a popup asking if you want to reopen in a container
3. Click "Reopen in Container"
#### Using Command Line:
```bash
# Using the DevContainer CLI
devcontainer up --workspace-folder .
```
## Environment Variable Details
### How Dev Containers Handle Environment Variables
Unlike the Docker Compose-based Immich developer setup, which uses `.env` files, Immich Dev Containers read environment variables from your shell environment. This is configured in `.devcontainer/devcontainer.json`:
```json
"remoteEnv": {
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./Library}",
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
}
```
The `${localEnv:VARIABLE:default}` syntax reads from your shell environment with optional defaults.
### Upload Location Path Resolution
The `UPLOAD_LOCATION` environment variable controls where files are stored:
**Default:** `./Library` (relative to the `docker` directory)
**Resolved to:** `<immich-root>/docker/Library`
**Bind Mounts Created:**
```yaml
# From .devcontainer/server/container-compose-overrides.yml
- ${UPLOAD_LOCATION-./Library}/photos:/workspaces/immich/server/upload
- ${UPLOAD_LOCATION-./Library}/postgres:/var/lib/postgresql/data
```
### Database Configuration
These variables have sensible defaults (for development) but can be customized:
| Variable | Default | Description |
| ------------------ | ---------- | ------------------- |
| `DB_PASSWORD` | `postgres` | PostgreSQL password |
| `DB_USERNAME` | `postgres` | PostgreSQL username |
| `DB_DATABASE_NAME` | `immich` | Database name |
### Setting Environment Variables
Add these to your shell profile (`~/.bashrc`, `~/.zshrc`, `~/.bash_profile`, etc.):
```bash
# Required
export UPLOAD_LOCATION=./Library # or absolute path
# Optional (only if using non-default values)
export DB_PASSWORD=your_password
export DB_USERNAME=your_username
export DB_DATABASE_NAME=your_database
```
Remember to reload your shell configuration:
```bash
source ~/.bashrc # or ~/.zshrc, etc.
```
## Git Configuration
### SSH Keys and Authentication
To use your SSH keys for GitHub access inside the Dev Container:
1. **Start SSH Agent** on your host machine:
```bash
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/id_rsa # or your key path
```
2. **VS Code automatically forwards your SSH agent** to the container
For detailed instructions, see the [VS Code guide on sharing Git credentials](https://code.visualstudio.com/remote/advancedcontainers/sharing-git-credentials).
### Commit Signing
To use your SSH key for commit signing, see the [GitHub guide on SSH commit signing](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key#telling-git-about-your-ssh-key).
## Development Workflow
### Automatic Setup
When the Dev Container starts, it automatically:
1. **Runs post-create script** (`container-server-post-create.sh`):
- Adjusts file permissions for the `node` user
- Installs dependencies: `npm install` in all packages
- Builds TypeScript SDK: `npm run build` in `open-api/typescript-sdk`
2. **Starts development servers** via VS Code tasks:
- `Immich API Server (Nest)` - API server with hot-reloading on port 2283
- `Immich Web Server (Vite)` - Web frontend with hot-reloading on port 3000
- Both servers watch for file changes and recompile automatically
3. **Configures port forwarding**:
- Web UI: http://localhost:3000 (opens automatically)
- API: http://localhost:2283
- Debug ports: 9230 (workers), 9231 (API)
:::info
The Dev Container setup replaces the `make dev` command from the traditional setup. All services start automatically when you open the container.
:::
### Accessing Services
Once running, you can access:
| Service | URL | Description |
| -------- | --------------------- | ---------------------------------------------------------------------------------------------- |
| Web UI | http://localhost:3000 | Main web interface |
| API | http://localhost:2283 | REST API endpoints (not used directly; the web UI exposes this at http://localhost:3000/api) |
| Database | localhost:5432 | PostgreSQL (username: `postgres`) (not used directly) |
### Connecting Mobile Apps
To connect the mobile app to your Dev Container:
1. Find your machine's IP address
2. In the mobile app, use: `http://YOUR_IP:3000/api`
3. Ensure your firewall allows connections on port 3000
### Making Code Changes
- **Server code** (`/server`): Changes trigger automatic restart
- **Web code** (`/web`): Changes trigger hot module replacement
- **Database migrations**: Run `npm run sync:sql` in the server directory
- **API changes**: Regenerate TypeScript SDK with `make open-api`
## Testing
### Running Tests
The Dev Container supports multiple ways to run tests:
#### Using Make Commands (Recommended)
```bash
# Run tests for specific components
make test-server # Server unit tests
make test-web # Web unit tests
make test-e2e # End-to-end tests
make test-cli # CLI tests
# Run all tests
make test-all # Runs tests for all components
# Medium tests (integration tests)
make test-medium-dev # End-to-end tests
```
#### Using NPM Directly
```bash
# Server tests
cd /workspaces/immich/server
npm test # Run all tests
npm run test:watch # Watch mode
npm run test:cov # Coverage report
# Web tests
cd /workspaces/immich/web
npm test # Run all tests
npm run test:watch # Watch mode
# E2E tests
cd /workspaces/immich/e2e
npm run test # Run API tests
npm run test:web # Run web UI tests
```
### Code Quality Commands
```bash
# Linting
make lint-server # Lint server code
make lint-web # Lint web code
make lint-all # Lint all components
# Formatting
make format-server # Format server code
make format-web # Format web code
make format-all # Format all code
# Type checking
make check-server # Type check server
make check-web # Type check web
make check-all # Check all components
# Complete hygiene check
make hygiene-all # Runs lint, format, check, SQL sync, and audit
```
### Additional Make Commands
```bash
# Build commands
make build-server # Build server
make build-web # Build web app
make build-all # Build everything
# API generation
make open-api # Generate OpenAPI specs
make open-api-typescript # Generate TypeScript SDK
make open-api-dart # Generate Dart SDK
# Database
make sql # Sync database schema
# Dependencies
make install-server # Install server dependencies
make install-web # Install web dependencies
make install-all # Install all dependencies
```
### Debugging
The Dev Container is pre-configured for debugging:
1. **API Server Debugging**:
- Set breakpoints in VS Code
- Press `F5` or use "Run and Debug" panel
- Select "Attach to Server" configuration
- Debug port: 9231
2. **Worker Debugging**:
- Use "Attach to Workers" configuration
- Debug port: 9230
3. **Web Debugging**:
- Use browser DevTools
- VS Code debugger for Chrome/Edge extensions supported
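If you ever need to recreate one of these attach configurations, a minimal `launch.json` entry looks like the following sketch (the exact names used by the repository may differ):

```json
{
  "type": "node",
  "request": "attach",
  "name": "Attach to Server",
  "port": 9231,
  "restart": true
}
```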
## Troubleshooting
### Common Issues
#### Permission Errors
**Problem**: `EACCES` or permission denied errors
**Solution**:
- The Dev Container runs as the `node` user (UID 1000)
- If your host UID differs, you may see permission issues
- Try rebuilding the container: "Dev Containers: Rebuild Container"
#### Container Won't Start
**Problem**: Dev Container fails to start or build
**Solution**:
1. Check Docker is running: `docker ps`
2. Clean Docker resources: `docker system prune -a`
3. Check available disk space
4. Review Docker Desktop resource limits
#### Port Already in Use
**Problem**: "Port 3000/2283 is already in use"
**Solution**:
1. Check for conflicting services: `lsof -i :3000` (macOS/Linux)
2. Stop conflicting services or change port mappings
3. Restart Docker Desktop
#### Upload Location Not Set
**Problem**: Errors about missing UPLOAD_LOCATION
**Solution**:
1. Set the environment variable: `export UPLOAD_LOCATION=./Library`
2. Add to your shell profile for persistence
3. Restart your terminal and VS Code
#### Database Connection Failed
**Problem**: Cannot connect to PostgreSQL
**Solution**:
1. Ensure all containers are running: `docker ps`
2. Check logs: "Dev Containers: Show Container Log"
3. Verify database credentials match environment variables
### Getting Help
If you encounter issues:
1. Check container logs: View → Output → Select "Dev Containers"
2. Rebuild without cache: "Dev Containers: Rebuild Container Without Cache"
3. Review [common Docker issues](https://docs.docker.com/desktop/troubleshoot/)
4. Ask in [Discord](https://discord.immich.app) `#help-desk-support` channel
## Mobile Development
While the Dev Container focuses on server and web development, you can connect mobile apps for testing:
### Connecting iOS/Android Apps
1. **Ensure API is accessible**:
```bash
# Find your machine's IP
# macOS
ipconfig getifaddr en0
# Linux
hostname -I
# Windows (in WSL2)
ip addr show eth0
```
2. **Configure mobile app**:
- Server URL: `http://YOUR_IP:2283/api`
- Ensure firewall allows port 2283
3. **For full mobile development**, see the [mobile development guide](/docs/developer/setup) which covers:
- Flutter setup
- Running on simulators/devices
- Mobile-specific debugging
## Advanced Configuration
### Custom VS Code Extensions
Add extensions to `.devcontainer/devcontainer.json`:
```json
"customizations": {
"vscode": {
"extensions": [
"your.extension-id"
]
}
}
```
### Additional Services
To add services (e.g., Redis Commander), modify:
1. `/docker/docker-compose.dev.yml` - Add service definition
2. `/.devcontainer/server/container-compose-overrides.yml` - Add overrides if needed
### Resource Limits
Adjust Docker Desktop resources:
- **macOS/Windows**: Docker Desktop → Settings → Resources
- **Linux**: Modify Docker daemon configuration
Recommended minimums:
- CPU: 4 cores
- Memory: 8GB
- Disk: 20GB
## Next Steps
- Read the [architecture overview](/docs/developer/architecture)
- Learn about [database migrations](/docs/developer/database-migrations)
- Explore [API documentation](/docs/api)
- Join `#immich` on [Discord](https://discord.immich.app)

View File

@@ -63,41 +63,22 @@ If you only want to do web development connected to an existing, remote backend,
IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev
``` ```
If you're using PowerShell on Windows you may need to set the env var separately like so:
```powershell
$env:IMMICH_SERVER_URL = "https://demo.immich.app/"
npm run dev
```
#### `@immich/ui` #### `@immich/ui`
To see local changes to `@immich/ui` in Immich, do the following: To see local changes to `@immich/ui` in Immich, do the following:
1. Install `@immich/ui` as a sibling to `immich/`, for example `/home/user/immich` and `/home/user/ui` 1. Install `@immich/ui` as a sibling to `immich/`, for example `/home/user/immich` and `/home/user/ui`
2. Build the `@immich/ui` project via `npm run build` 1. Build the `@immich/ui` project via `npm run build`
3. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`) 1. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`)
4. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`) 1. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`)
5. Uncomment the import statement in `web/src/app.css` file `@import '/usr/ui/dist/theme/default.css';` and comment out `@import '@immich/ui/theme/default.css';` 1. Start up the stack via `make dev`
6. Start up the stack via `make dev` 1. After making changes in `@immich/ui`, rebuild it (`npm run build`)
7. After making changes in `@immich/ui`, rebuild it (`npm run build`)
### Mobile app ### Mobile app
#### Setup The mobile app `(/mobile)` will require the Flutter toolchain 3.13.x to be installed on your system.
1. Setup Flutter toolchain using FVM. Please refer to the [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
2. Run `flutter pub get` to install the dependencies.
3. Run `make translation` to generate the translation file.
4. Run `fvm flutter run` to start the app.
#### Translation
To add a new translation text, enter the key-value pair in the `i18n/en.json` in the root of the immich project. Then, from the `mobile/` directory, run
```bash
make translation
```
The mobile app asks you what backend to connect to. You can utilize the demo backend (https://demo.immich.app/) if you don't need to change server code or upload photos. Alternatively, you can run the server yourself per the instructions above. The mobile app asks you what backend to connect to. You can utilize the demo backend (https://demo.immich.app/) if you don't need to change server code or upload photos. Alternatively, you can run the server yourself per the instructions above.
@@ -115,72 +96,32 @@ Note: Activating the license is not required.
### VSCode ### VSCode
Install `Flutter`, `DCM`, `Prettier`, `ESLint` and `Svelte` extensions. These extensions are listed in the `extensions.json` file under `.vscode/` and should appear as workspace recommendations. Install `Flutter`, `DCM`, `Prettier`, `ESLint` and `Svelte` extensions.
Here are the settings we use; they should be active as workspace settings (`settings.json`): in User `settings.json` (`cmd + shift + p` and search for `Open User Settings JSON`) add the following:
```json title="settings.json" ```json title="settings.json"
{ {
"[css]": { "editor.formatOnSave": true,
"[javascript][typescript][css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode", "editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true, "editor.tabSize": 2,
"editor.formatOnSave": true
},
"[svelte]": {
"editor.defaultFormatter": "svelte.svelte-vscode",
"editor.tabSize": 2 "editor.tabSize": 2
}, },
"svelte.enable-ts-plugin": true,
"eslint.validate": ["javascript", "svelte"],
"[dart]": { "[dart]": {
"editor.defaultFormatter": "Dart-Code.dart-code",
"editor.formatOnSave": true, "editor.formatOnSave": true,
"editor.selectionHighlight": false, "editor.selectionHighlight": false,
"editor.suggest.snippetsPreventQuickSuggestions": false, "editor.suggest.snippetsPreventQuickSuggestions": false,
"editor.suggestSelection": "first", "editor.suggestSelection": "first",
"editor.tabCompletion": "onlySnippets", "editor.tabCompletion": "onlySnippets",
"editor.wordBasedSuggestions": "off" "editor.wordBasedSuggestions": "off",
}, "editor.defaultFormatter": "Dart-Code.dart-code"
"[javascript]": { }
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[svelte]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "svelte.svelte-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[typescript]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"cSpell.words": ["immich"],
"editor.formatOnSave": true,
"eslint.validate": ["javascript", "svelte"],
"explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": {
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
},
"svelte.enable-ts-plugin": true,
"typescript.preferences.importModuleSpecifier": "non-relative"
} }
``` ```

View File

@@ -1,19 +0,0 @@
# Chromecast support
Immich supports Google's Cast protocol so that photos and videos can be cast to devices such as a Chromecast or a Nest Hub. This feature is considered experimental and has several important limitations listed below. Currently, this feature is only supported by the web client; support on Android and iOS is planned for the future.
## Enable Google Cast Support
Google Cast support is disabled by default. The web UI uses Google-provided scripts and must retrieve them from Google's servers when the page loads. This is a privacy concern for some users and is thus opt-in.
You can enable Google Cast support through `Account Settings > Features > Cast > Google Cast`
<img src={require('./img/gcast-enable.webp').default} width="70%" title='Enable Google Cast Support' />
## Limitations
To use casting with Immich, there are a few prerequisites:
1. Your instance must be accessed via an HTTPS connection in order for the casting menu to show.
2. Your instance must be publicly accessible via HTTPS, and a DNS record for the server must be resolvable via Google's DNS servers (`8.8.8.8` and `8.8.4.4`)
3. Videos must be in a format that is compatible with Google Cast. For more info, check out [Google's documentation](https://developers.google.com/cast/docs/media)

View File

@@ -42,12 +42,6 @@ docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich
Please modify the `IMMICH_INSTANCE_URL` and `IMMICH_API_KEY` environment variables as suitable. You can also use a Docker env file to store your sensitive API key. Please modify the `IMMICH_INSTANCE_URL` and `IMMICH_API_KEY` environment variables as suitable. You can also use a Docker env file to store your sensitive API key.
This `docker run` command runs the `immich` command inside the container. You can append the desired parameters (see under "usage") to the command line like this:
```bash
docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich-instance/api -e IMMICH_API_KEY=your-api-key ghcr.io/immich-app/immich-cli:latest upload -a -c 5 --recursive directory/
```
## Usage ## Usage
<details> <details>
@@ -90,22 +84,19 @@ Usage: immich upload [paths...] [options]
Upload assets Upload assets
Arguments: Arguments:
paths One or more paths to assets to be uploaded paths One or more paths to assets to be uploaded
Options: Options:
-r, --recursive Recursive (default: false, env: IMMICH_RECURSIVE) -r, --recursive Recursive (default: false, env: IMMICH_RECURSIVE)
-i, --ignore <pattern> Pattern to ignore (env: IMMICH_IGNORE_PATHS) -i, --ignore [paths...] Paths to ignore (default: [], env: IMMICH_IGNORE_PATHS)
-h, --skip-hash Don't hash files before upload (default: false, env: IMMICH_SKIP_HASH) -h, --skip-hash Don't hash files before upload (default: false, env: IMMICH_SKIP_HASH)
-H, --include-hidden Include hidden folders (default: false, env: IMMICH_INCLUDE_HIDDEN) -H, --include-hidden Include hidden folders (default: false, env: IMMICH_INCLUDE_HIDDEN)
-a, --album Automatically create albums based on folder name (default: false, env: IMMICH_AUTO_CREATE_ALBUM) -a, --album Automatically create albums based on folder name (default: false, env: IMMICH_AUTO_CREATE_ALBUM)
-A, --album-name <name> Add all assets to specified album (env: IMMICH_ALBUM_NAME) -A, --album-name <name> Add all assets to specified album (env: IMMICH_ALBUM_NAME)
-n, --dry-run Don't perform any actions, just show what will be done (default: false, env: IMMICH_DRY_RUN) -n, --dry-run Don't perform any actions, just show what will be done (default: false, env: IMMICH_DRY_RUN)
-c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY) -c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY)
-j, --json-output Output detailed information in json format (default: false, env: IMMICH_JSON_OUTPUT) --delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS) --help display help for command
--no-progress Hide progress bars (env: IMMICH_PROGRESS_BAR)
--watch Watch for changes and upload automatically (default: false, env: IMMICH_WATCH_CHANGES)
--help display help for command
``` ```
</details> </details>
@@ -121,7 +112,7 @@ You begin by authenticating to your Immich server. For instance:
immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG
``` ```
This will store your credentials in an `auth.yml` file in the configuration directory, which defaults to `~/.config/immich/`. The directory can be set with the `-d` option or the environment variable `IMMICH_CONFIG_DIR`. Please keep the file secure, either by performing the logout command after you are done, or by deleting it manually. This will store your credentials in an `auth.yml` file in the configuration directory, which defaults to `~/.config/`. The directory can be set with the `-d` option or the environment variable `IMMICH_CONFIG_DIR`. Please keep the file secure, either by performing the logout command after you are done, or by deleting it manually.
Once you are authenticated, you can upload assets to your Immich server. Once you are authenticated, you can upload assets to your Immich server.
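For example, using the documented `IMMICH_CONFIG_DIR` variable to keep credentials in a custom directory and removing them when finished (a sketch; adjust the URL and API key to your setup):

```bash
export IMMICH_CONFIG_DIR=~/.immich-cli
immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG
# ... upload assets ...
immich logout   # removes the stored credentials
```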
@@ -175,16 +166,6 @@ By default, hidden files are skipped. If you want to include hidden files, use t
immich upload --include-hidden --recursive directory/ immich upload --include-hidden --recursive directory/
``` ```
You can use the `--json-output` option to get a json printed which includes
three keys: `newFiles`, `duplicates` and `newAssets`. Due to some logging
output you will need to strip the first three lines of output to get the json.
For example to get a list of files that would be uploaded for further
processing:
```bash
immich upload --dry-run . | tail -n +4 | jq .newFiles[]
```
### Obtain the API Key ### Obtain the API Key
The API key can be obtained in the user setting panel on the web interface. The API key can be obtained in the user setting panel on the web interface.

View File

@@ -69,8 +69,6 @@ Navigating to Administration > Settings > Machine Learning Settings > Facial Rec
:::tip :::tip
It's better to only tweak the parameters here than to set them to something very different unless you're ready to test a variety of options. If you do need to set a parameter to a strict setting, relaxing other settings can be a good option to compensate, and vice versa. It's better to only tweak the parameters here than to set them to something very different unless you're ready to test a variety of options. If you do need to set a parameter to a strict setting, relaxing other settings can be a good option to compensate, and vice versa.
You can learn how to tune the results in this [Guide](/docs/guides/better-facial-clusters)
::: :::
### Facial recognition model ### Facial recognition model

View File

@@ -121,6 +121,6 @@ Once this is done, you can continue to step 3 of "Basic Setup".
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml [hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html [nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
[jellyfin-lp]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#low-power-encoding [jellyfin-lp]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#configure-and-verify-lp-mode-on-linux
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#known-issues-and-limitations-on-linux [jellyfin-kernel-bug]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#known-issues-and-limitations
[libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases [libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases

Binary file not shown.

View File

@@ -37,7 +37,7 @@ To validate that Immich can reach your external library, start a shell inside th
### Exclusion Patterns ### Exclusion Patterns
By default, all files in the import paths will be added to the library. If there are files that should not be added, exclusion patterns can be used to exclude them. Exclusion patterns are glob patterns that are matched against the full file path. If a file matches an exclusion pattern, it will not be added to the library. Exclusion patterns can be added in the Scan Settings page for each library. By default, all files in the import paths will be added to the library. If there are files that should not be added, exclusion patterns can be used to exclude them. Exclusion patterns are glob patterns that are matched against the full file path. If a file matches an exclusion pattern, it will not be added to the library. Exclusion patterns can be added in the Scan Settings page for each library. Under the hood, Immich uses the [glob](https://www.npmjs.com/package/glob) package to match patterns, so please refer to [their documentation](https://github.com/isaacs/node-glob#glob-primer) to see what patterns are supported.
Some basic examples: Some basic examples:
@@ -48,11 +48,7 @@ Some basic examples:
Special characters such as @ should be escaped, for instance: Special characters such as @ should be escaped, for instance:
- `**/\@eaDir/**` will exclude all files in any directory named `@eaDir` - `**/\@eadir/**` will exclude all files in any directory named `@eadir`
:::info
Internally, Immich uses the [glob](https://www.npmjs.com/package/glob) package to process exclusion patterns, and sometimes those patterns are translated into [Postgres LIKE patterns](https://www.postgresql.org/docs/current/functions-matching.html). The intention is to support basic folder exclusions but we recommend against advanced usage since those can't reliably be translated to the Postgres syntax. Please refer to the [glob documentation](https://github.com/isaacs/node-glob#glob-primer) for a basic overview on glob patterns.
:::
### Automatic watching (EXPERIMENTAL) ### Automatic watching (EXPERIMENTAL)
@@ -62,7 +58,7 @@ If your photos are on a network drive, automatic file watching likely won't work
#### Troubleshooting #### Troubleshooting
If you encounter an `ENOSPC` error, you need to increase your file watcher limit. In sysctl, this key is called `fs.inotify.max_user_watches` and has a default value of 8192. Increase this number to a suitable value greater than the number of files you will be watching. Note that Immich has to watch all files in your import paths including any ignored files. If you encounter an `ENOSPC` error, you need to increase your file watcher limit. In sysctl, this key is called `fs.inotify.max_user_watched` and has a default value of 8192. Increase this number to a suitable value greater than the number of files you will be watching. Note that Immich has to watch all files in your import paths including any ignored files.
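For example, to raise the limit (a commonly used value; add the setting to `/etc/sysctl.conf` or a file under `/etc/sysctl.d/` to persist it across reboots):

```bash
sudo sysctl fs.inotify.max_user_watches=524288
```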
``` ```
ERROR [LibraryService] Library watcher for library c69faf55-f96d-4aa0-b83b-2d80cbc27d98 encountered error: Error: ENOSPC: System limit for number of file watchers reached, watch '/media/photo.jpg' ERROR [LibraryService] Library watcher for library c69faf55-f96d-4aa0-b83b-2d80cbc27d98 encountered error: Error: ENOSPC: System limit for number of file watchers reached, watch '/media/photo.jpg'
@@ -72,7 +68,7 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.
### Nightly job ### Nightly job
There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library management page. There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion.
## Usage ## Usage
@@ -95,7 +91,7 @@ The `immich-server` container will need access to the gallery. Modify your docke
+ - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro + - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
+ - /home/user/old-pics:/mnt/media/old-pics:ro + - /home/user/old-pics:/mnt/media/old-pics:ro
+ - /mnt/media/videos:/mnt/media/videos:ro + - /mnt/media/videos:/mnt/media/videos:ro
+ - /mnt/media/videos2:/mnt/media/videos2 # WARNING: Immich will be able to delete the files in this folder, as it does not end with :ro + - /mnt/media/videos2:/mnt/media/videos2 # the files in this folder can be deleted, as it does not end with :ro
+ - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system. + - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
``` ```
@@ -112,16 +108,14 @@ _Remember to run `docker compose up -d` to register the changes. Make sure you c
These actions must be performed by the Immich administrator. These actions must be performed by the Immich administrator.
- Click on your avatar on the upper right corner - Click on Administration -> Libraries
- Click on Administration -> External Libraries - Click on Create External Library
- Click on Create an external library…
- Select which user owns the library, this can not be changed later - Select which user owns the library, this can not be changed later
- Enter `/mnt/media/christmas-trip` then click Add
- Click on Save
- Click the drop-down menu on the newly created library
- Click on Scan
- Click the drop-down menu on the newly created library - Click the drop-down menu on the newly created library
- Click on Rename Library and rename it to "Christmas Trip" - Click on Rename Library and rename it to "Christmas Trip"
- Click Edit Import Paths
- Click on Add Path
- Enter `/mnt/media/christmas-trip` then click Add
NOTE: We have to use the `/mnt/media/christmas-trip` path and not the `/mnt/nas/christmas-trip` path since all paths have to be what the Docker containers see. NOTE: We have to use the `/mnt/media/christmas-trip` path and not the `/mnt/nas/christmas-trip` path since all paths have to be what the Docker containers see.

View File

@@ -11,9 +11,7 @@ You do not need to redo any machine learning jobs after enabling hardware accele
- ARM NN (Mali) - ARM NN (Mali)
- CUDA (NVIDIA GPUs with [compute capability](https://developer.nvidia.com/cuda-gpus) 5.2 or higher) - CUDA (NVIDIA GPUs with [compute capability](https://developer.nvidia.com/cuda-gpus) 5.2 or higher)
- ROCm (AMD GPUs) - OpenVINO (Intel discrete GPUs such as Iris Xe and Arc)
- OpenVINO (Intel GPUs such as Iris Xe and Arc)
- RKNN (Rockchip)
## Limitations ## Limitations
@@ -21,7 +19,6 @@ You do not need to redo any machine learning jobs after enabling hardware accele
- Only Linux and Windows (through WSL2) servers are supported. - Only Linux and Windows (through WSL2) servers are supported.
- ARM NN is only supported on devices with Mali GPUs. Other Arm devices are not supported. - ARM NN is only supported on devices with Mali GPUs. Other Arm devices are not supported.
- Some models may not be compatible with certain backends. CUDA is the most reliable. - Some models may not be compatible with certain backends. CUDA is the most reliable.
- Search latency isn't improved by ARM NN due to model compatibility issues preventing its use. However, smart search jobs do make use of ARM NN.
## Prerequisites ## Prerequisites
@@ -36,47 +33,29 @@ You do not need to redo any machine learning jobs after enabling hardware accele
- The `hwaccel.ml.yml` file assumes the path to it is `/usr/lib/libmali.so`, so update accordingly if it is elsewhere - The `hwaccel.ml.yml` file assumes the path to it is `/usr/lib/libmali.so`, so update accordingly if it is elsewhere
- The `hwaccel.ml.yml` file assumes an additional file `/lib/firmware/mali_csffw.bin`, so update accordingly if your device's driver does not require this file - The `hwaccel.ml.yml` file assumes an additional file `/lib/firmware/mali_csffw.bin`, so update accordingly if your device's driver does not require this file
- Optional: Configure your `.env` file, see [environment variables](/docs/install/environment-variables) for ARM NN specific settings - Optional: Configure your `.env` file, see [environment variables](/docs/install/environment-variables) for ARM NN specific settings
- In particular, the `MACHINE_LEARNING_ANN_FP16_TURBO` can significantly improve performance at the cost of very slightly lower accuracy
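For example, in your `.env` (assuming the variable takes a boolean value):

```
MACHINE_LEARNING_ANN_FP16_TURBO=true
```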
#### CUDA #### CUDA
- The GPU must have compute capability 5.2 or greater. - The GPU must have compute capability 5.2 or greater.
- The server must have the official NVIDIA driver installed. - The server must have the official NVIDIA driver installed.
- The installed driver must be >= 545 (it must support CUDA 12.3). - The installed driver must be >= 535 (it must support CUDA 12.2).
- On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed. - On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
#### ROCm
- The GPU must be supported by ROCm. If it isn't officially supported, you can attempt to use the `HSA_OVERRIDE_GFX_VERSION` environmental variable: `HSA_OVERRIDE_GFX_VERSION=<a supported version, e.g. 10.3.0>`. If this doesn't work, you might need to also set `HSA_USE_SVM=0`.
- The ROCm image is quite large and requires at least 35GiB of free disk space. However, pulling later updates to the service through Docker will generally only amount to a few hundred megabytes as the rest will be cached.
- This backend is new and may experience some issues. For example, GPU power consumption can be higher than usual after running inference, even if the machine learning service is idle. In this case, it will only go back to normal after being idle for 5 minutes (configurable with the [MACHINE_LEARNING_MODEL_TTL](/docs/install/environment-variables) setting).
#### OpenVINO #### OpenVINO
- Integrated GPUs are more likely to experience issues than discrete GPUs, especially for older processors or servers with low RAM. - The server must have a discrete GPU, i.e. Iris Xe or Arc. Expect issues when attempting to use integrated graphics.
- Ensure the server's kernel version is new enough to use the device for hardware acceleration. - Ensure the server's kernel version is new enough to use the device for hardware acceleration.
- Expect higher RAM usage when using OpenVINO compared to CPU processing.
#### RKNN
- You must have a supported Rockchip SoC: only RK3566, RK3568, RK3576 and RK3588 are supported at this moment.
- Make sure you have the appropriate linux kernel driver installed
- This is usually pre-installed on the device vendor's Linux images
- RKNPU driver V0.9.8 or later must be available in the host server
- You may confirm this by running `cat /sys/kernel/debug/rknpu/version` to check the version
- Optional: Configure your `.env` file, see [environment variables](/docs/install/environment-variables) for RKNN specific settings
- In particular, setting `MACHINE_LEARNING_RKNN_THREADS` to 2 or 3 can _dramatically_ improve performance for RK3576 and RK3588 compared to the default of 1, at the expense of multiplying the amount of RAM each model uses by that amount.
## Setup ## Setup
1. If you do not already have it, download the latest [`hwaccel.ml.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`. 1. If you do not already have it, download the latest [`hwaccel.ml.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
2. In the `docker-compose.yml` under `immich-machine-learning`, uncomment the `extends` section and change `cpu` to the appropriate backend. 2. In the `docker-compose.yml` under `immich-machine-learning`, uncomment the `extends` section and change `cpu` to the appropriate backend.
3. Still in `immich-machine-learning`, add one of -[armnn, cuda, rocm, openvino, rknn] to the `image` section's tag at the end of the line. 3. Still in `immich-machine-learning`, add one of -[armnn, cuda, openvino] to the `image` section's tag at the end of the line.
4. Redeploy the `immich-machine-learning` container with these updated settings. 4. Redeploy the `immich-machine-learning` container with these updated settings.
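A sketch of the resulting `immich-machine-learning` service in `docker-compose.yml`, using CUDA as an example (adjust the image tag and `service` for your backend):

```yaml
immich-machine-learning:
  image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}-cuda
  extends:
    file: hwaccel.ml.yml
    service: cuda
```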
### Confirming Device Usage ### Confirming Device Usage
You can confirm the device is being recognized and used by checking its utilization. There are many tools to display this, such as `nvtop` for NVIDIA or Intel, `intel_gpu_top` for Intel, and `radeontop` for AMD. You can confirm the device is being recognized and used by checking its utilization. There are many tools to display this, such as `nvtop` for NVIDIA or Intel and `intel_gpu_top` for Intel.
You can also check the logs of the `immich-machine-learning` container. When a Smart Search or Face Detection job begins, or when you search with text in Immich, you should either see a log for `Available ORT providers` containing the relevant provider (e.g. `CUDAExecutionProvider` in the case of CUDA), or a `Loaded ANN model` log entry without errors in the case of ARM NN. You can also check the logs of the `immich-machine-learning` container. When a Smart Search or Face Detection job begins, or when you search with text in Immich, you should either see a log for `Available ORT providers` containing the relevant provider (e.g. `CUDAExecutionProvider` in the case of CUDA), or a `Loaded ANN model` log entry without errors in the case of ARM NN.
@@ -147,12 +126,3 @@ Note that you should increase job concurrencies to increase overall utilization
- If you encounter an error when a model is running, try a different model to see if the issue is model-specific. - If you encounter an error when a model is running, try a different model to see if the issue is model-specific.
- You may want to increase concurrency past the default for higher utilization. However, keep in mind that this will also increase VRAM consumption. - You may want to increase concurrency past the default for higher utilization. However, keep in mind that this will also increase VRAM consumption.
- Larger models benefit more from hardware acceleration, if you have the VRAM for them. - Larger models benefit more from hardware acceleration, if you have the VRAM for them.
- Compared to ARM NN, RKNPU has:
- Wider model support (including for search, which ARM NN does not accelerate)
- Less heat generation
- Very slightly lower accuracy (RKNPU always uses FP16, while ARM NN by default uses higher precision FP32 unless `MACHINE_LEARNING_ANN_FP16_TURBO` is enabled)
- Varying speed (tested on RK3588):
- If `MACHINE_LEARNING_RKNN_THREADS` is at the default of 1, RKNPU will have substantially lower throughput for ML jobs than ARM NN in most cases, but similar latency (such as when searching)
- If `MACHINE_LEARNING_RKNN_THREADS` is set to 3, it will be somewhat faster than ARM NN at FP32, but somewhat slower than ARM NN if `MACHINE_LEARNING_ANN_FP16_TURBO` is enabled
- When other tasks also use the GPU (like transcoding), RKNPU has a significant advantage over ARM NN as it uses the otherwise idle NPU instead of competing for GPU usage
- Lower RAM usage if `MACHINE_LEARNING_RKNN_THREADS` is at the default of 1, but significantly higher if greater than 1 (which is necessary for it to fully utilize the NPU and hence be comparable in speed to ARM NN)

File diff suppressed because it is too large.

View File

For the full list, refer to the [Immich source code](https://github.com/immich-app/immich).
## Image formats
| Format      | Extension(s)                  |     Supported?     | Notes           |
| :---------- | :---------------------------- | :----------------: | :-------------- |
| `AVIF`      | `.avif`                       | :white_check_mark: |                 |
| `BMP`       | `.bmp`                        | :white_check_mark: |                 |
| `GIF`       | `.gif`                        | :white_check_mark: |                 |
| `HEIC`      | `.heic`                       | :white_check_mark: |                 |
| `HEIF`      | `.heif`                       | :white_check_mark: |                 |
| `JPEG 2000` | `.jp2`                        | :white_check_mark: |                 |
| `JPEG`      | `.jpeg` `.jpg` `.jpe` `.insp` | :white_check_mark: |                 |
| `JPEG XL`   | `.jxl`                        | :white_check_mark: |                 |
| `PNG`       | `.png`                        | :white_check_mark: |                 |
| `PSD`       | `.psd`                        | :white_check_mark: | Adobe Photoshop |
| `RAW`       | `.raw`                        | :white_check_mark: |                 |
| `RW2`       | `.rw2`                        | :white_check_mark: |                 |
| `SVG`       | `.svg`                        | :white_check_mark: |                 |
| `TIFF`      | `.tif` `.tiff`                | :white_check_mark: |                 |
| `WEBP`      | `.webp`                       | :white_check_mark: |                 |
## Video formats
# Better Facial Recognition Clusters
## Purpose
This guide explains how to optimize facial recognition in systems with large image libraries. By following these steps, you'll achieve better clustering of faces, reducing the need for manual merging.
---
## Important Notes
- **Best Suited For:** Large image libraries after importing a significant number of images.
- **Warning:** This method deletes all previously assigned names.
- **Tip:** **Always take a [backup](/docs/administration/backup-and-restore#database) before proceeding!**
---
## Step-by-Step Instructions
### Objective
To enhance face clustering and ensure the model effectively identifies faces using high-quality initial data.
---
### Steps
#### 1. Adjust Machine Learning Settings
Navigate to:
**Admin → Administration → Settings → Machine Learning Settings**
Make the following changes:
- **Maximum recognition distance (Optional):**
Lower this value, e.g., to **0.4**, if the library contains people with similar facial features.
- **Minimum recognized faces:**
  Set this to a **high value** (e.g., 20 for libraries with a large number of assets (~100K+), and 10 for libraries with a medium number of assets (~40K+)).
  > A high value ensures clusters only include faces that appear at least `value` times in the library, improving the initial clustering process.
---
#### 2. Run Reset Jobs
Go to:
**Admin → Administration → Settings → Jobs**
Perform the following:
1. **FACIAL RECOGNITION → Reset**
> These reset jobs rebuild the recognition model based on the new settings.
---
#### 3. Refine Recognition with Lower Thresholds
Once the reset jobs are complete, refine the recognition as follows:
- **Step 1:**
  Return to **Minimum recognized faces** in Machine Learning Settings and lower the value to **10** (for medium libraries, lower it from 10 to 5).
> Run the job: **FACIAL RECOGNITION → MISSING Mode**
- **Step 2:**
Lower the value again to **3**.
> Run the job: **FACIAL RECOGNITION → MISSING Mode**
:::tip try different values
For libraries with a larger or smaller number of assets, different values will work better or worse. It is recommended to try different values **before assigning names** and see which settings work best for your library.
:::
---
This guide explains how to store generated and raw files with Docker's volume mounts.
:::caution
It is important to remember to update the backup settings after following the guide to back up the new backup paths if using automatic backup tools, especially `profile/`.
:::
In our `.env` file, we will define the paths we want to use. Note that you don't have to define all of these: UPLOAD_LOCATION will be the base folder that files are stored in by default, with the other paths acting as overrides.
```diff title=".env"
# You can find documentation for all the supported environment variables [here](/docs/install/environment-variables)
...
```
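To make that concrete, here is a rough sketch of what such additions might look like — `UPLOAD_LOCATION` is the documented base path, while the override variable names and host paths below are illustrative assumptions:

```shell title=".env (illustrative sketch)"
# base folder that files are stored in by default
UPLOAD_LOCATION=/mnt/pool/immich
# hypothetical overrides for specific kinds of generated files
THUMB_LOCATION=/mnt/ssd/immich/thumbs
PROFILE_LOCATION=/mnt/ssd/immich/profile
```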
After defining the locations of these files, we will edit the `docker-compose.yml` file accordingly and add the new variables to the `immich-server` container. These paths are where the mount attaches inside of the container, so don't change those.
```diff title="docker-compose.yml"
services:
...
      - /etc/localtime:/etc/localtime:ro
```
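For orientation, the resulting volume mounts might look something like the sketch below — the container-side paths are assumptions based on Immich's upload folder layout, so keep whatever paths your compose file already uses:

```yaml
services:
  immich-server:
    volumes:
      # host path (from .env) : path inside the container
      - ${UPLOAD_LOCATION}:/usr/src/app/upload
      - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
      - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
      - /etc/localtime:/etc/localtime:ro
```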
After making this change, you have to move the files over to the new folders so that Immich can find everything it needs. If you haven't uploaded anything important yet, you can instead reset Immich entirely by deleting the database folder. Then restart Immich to register the changes:
```
docker compose up -d
```
…online generators you can use.
2. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.) A minimal example of a style document is shown at the end of this section.
3. Save your selections. Reload the map, and enjoy your custom map style!
## Use MapTiler to build a custom style
Customizing the map style can be done easily using MapTiler, if you do not want to write an entire JSON document by hand.
1. Create a free account at https://cloud.maptiler.com
2. Once logged in, you can either create a brand new map by clicking on **New Map**, selecting a starter map, and then clicking **Customize**, OR by selecting a **Standard Map** and customizing it from there.
3. The **editor** interface is self-explanatory. You can change colors, remove visible layers, or add optional layers (e.g., administrative, topo, hydro, etc.) in the composer.
4. Once you have your map composed, click on **Save** at the top right. Give it a unique name to save it to your account.
5. Next, **Publish** your style using the **Publish** button at the top right. This will deploy it to production, which means it is able to be exposed over the Internet. MapTiler will present an interactive side-by-side map with the original and your changes prior to publication.<br/>![MapTiler Publication Settings](img/immich_map_styles_publish.webp)
6. MapTiler will warn you that changing the map will change it across all apps using the map. Since no apps are using the map yet, this is okay.
7. Clicking on the name of your new map at the top left will bring you to the item's **details** page. From here, copy the link to the JSON style under **Use vector style**. This link will automatically contain your personal API key to MapTiler.
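If you are curious what such a style document contains, here is a minimal sketch of a MapLibre-compatible style (the tile URL is a placeholder):

```json
{
  "version": 8,
  "name": "My Custom Style",
  "sources": {
    "base": { "type": "vector", "url": "https://example.com/tiles.json" }
  },
  "layers": [
    { "id": "background", "type": "background", "paint": { "background-color": "#f8f4f0" } }
  ]
}
```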
# Database Queries
:::danger
Keep in mind that mucking around in the database might set the Moon on fire. Avoid modifying the database directly when possible, and always have current backups.
:::
```sql title="Find by path prefix"
SELECT * FROM "assets" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
```
```sql title="Find by ID"
SELECT * FROM "assets" WHERE "id" = '9f94e60f-65b6-47b7-ae44-a4df7b57f0e9';
```
```sql title="Find by partial ID"
SELECT * FROM "assets" WHERE "id"::text LIKE '%ab431d3a%';
```
:::note
You can calculate the checksum for a particular file by using the command `sha1sum <filename>` (see the example query below).
:::
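As an illustration of putting that checksum to work — this assumes the `assets` table stores the SHA-1 in a `bytea` column named `checksum`, so verify against your schema first:

```sql
-- replace the hex string with the sha1sum output for your file
SELECT * FROM "assets" WHERE "checksum" = decode('f572d396fae9206628714fb2ce00f72e94f2258f', 'hex');
```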

```yaml
name: immich_remote_ml
services:
  immich-machine-learning:
    container_name: immich_machine_learning
    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag.
    # Example tag: ${IMMICH_VERSION:-release}-cuda
    image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
    # extends:
    #   file: hwaccel.ml.yml
    #   service: # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable
    volumes:
      - model-cache:/cache
    restart: always
```
---
sidebar_position: 100
---
# Config File
A config file can be provided as an alternative to the UI configuration.
The default configuration looks like this:
"buttonText": "Login with OAuth", "buttonText": "Login with OAuth",
"clientId": "", "clientId": "",
"clientSecret": "", "clientSecret": "",
"defaultStorageQuota": null, "defaultStorageQuota": 0,
"enabled": false, "enabled": false,
"issuerUrl": "", "issuerUrl": "",
"mobileOverrideEnabled": false, "mobileOverrideEnabled": false,

Some files were not shown because too many files have changed in this diff Show More
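To point the server at such a file, Immich reads the path from the `IMMICH_CONFIG_FILE` environment variable; a minimal compose sketch (the host-side filename is illustrative):

```yaml
services:
  immich-server:
    environment:
      IMMICH_CONFIG_FILE: /config/immich.json
    volumes:
      # mount your config read-only at the path the variable points to
      - ./immich.json:/config/immich.json:ro
```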